diff --git a/.codecov.yml b/.codecov.yml index a628d33cbec5..326dd3e0b29e 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,13 +4,6 @@ # Can be validated via instructions at: # https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml -# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed -# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage -# needs to be merged across those builds -codecov: - notify: - after_n_builds: 2 - # Settings related to code coverage analysis coverage: status: diff --git a/.dockerignore b/.dockerignore index 0e42960dc9c0..7d3bdc2b4b0d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,6 +6,5 @@ dspace/modules/*/target/ Dockerfile.* dspace/src/main/docker/dspace-postgres-pgcrypto dspace/src/main/docker/dspace-postgres-pgcrypto-curl -dspace/src/main/docker/solr dspace/src/main/docker/README.md dspace/src/main/docker-compose/ diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index dcb98747ba1e..4006656354af 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -88,6 +88,39 @@ jobs: name: ${{ matrix.type }} results path: ${{ matrix.resultsdir }} - # https://github.com/codecov/codecov-action + # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below) + - name: Upload code coverage report to Artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.type }} coverage report + path: 'dspace/target/site/jacoco-aggregate/jacoco.xml' + retention-days: 14 + + # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test + # job above. This is necessary because Codecov uploads seem to randomly fail at times. 
+ # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954 + codecov: + # Must run after 'tests' job above + needs: tests + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + # Download artifacts from previous 'tests' job + - name: Download coverage artifacts + uses: actions/download-artifact@v3 + + # Now attempt upload to Codecov using its action. + # NOTE: We use a retry action to retry the Codecov upload if it fails the first time. + # + # Retry action: https://github.com/marketplace/actions/retry-action + # Codecov action: https://github.com/codecov/codecov-action - name: Upload coverage to Codecov.io - uses: codecov/codecov-action@v3 + uses: Wandalen/wretry.action@v1.0.36 + with: + action: codecov/codecov-action@v3 + # Try upload 5 times max + attempt_limit: 5 + # Run again in 30 seconds + attempt_delay: 30000 diff --git a/.github/workflows/codescan.yml b/.github/workflows/codescan.yml index 7580b4ba3dc3..9e6dcc0b23af 100644 --- a/.github/workflows/codescan.yml +++ b/.github/workflows/codescan.yml @@ -5,12 +5,16 @@ # because CodeQL requires a fresh build with all tests *disabled*. name: "Code Scanning" -# Run this code scan for all pushes / PRs to main branch. Also run once a week. +# Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week. 
on: push: - branches: [ main ] + branches: + - main + - 'dspace-**' pull_request: - branches: [ main ] + branches: + - main + - 'dspace-**' # Don't run if PR is only updating static documentation paths-ignore: - '**/*.md' diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 64e12f01aac0..b7979e589653 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -78,7 +78,7 @@ jobs: # https://github.com/docker/build-push-action - name: Build and push 'dspace-dependencies' image id: docker_build_deps - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile.dependencies @@ -90,9 +90,38 @@ jobs: tags: ${{ steps.meta_build_deps.outputs.tags }} labels: ${{ steps.meta_build_deps.outputs.labels }} - ####################################### - # Build/Push the 'dspace/dspace' image - ####################################### + ####################################### + # Build/Push the 'dspace/dspace' image + ####################################### + dspace: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image id: meta_build @@ -104,7 +133,7 @@ jobs: - name: Build and push 'dspace' image id: docker_build - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile @@ -116,9 +145,38 @@ jobs: tags: ${{ steps.meta_build.outputs.tags }} labels: ${{ steps.meta_build.outputs.labels }} - ##################################################### - # Build/Push the 'dspace/dspace' image ('-test' tag) - ##################################################### + ############################################################# + # Build/Push the 'dspace/dspace' image ('-test' tag) + ############################################################# + dspace-test: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build_test step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image id: meta_build_test @@ -133,7 +191,7 @@ jobs: - name: Build and push 'dspace-test' image id: docker_build_test - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile.test @@ -145,9 +203,38 @@ jobs: tags: ${{ steps.meta_build_test.outputs.tags }} labels: ${{ steps.meta_build_test.outputs.labels }} - ########################################### - # Build/Push the 'dspace/dspace-cli' image - ########################################### + ########################################### + # Build/Push the 'dspace/dspace-cli' image + ########################################### + dspace-cli: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build_test step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image id: meta_build_cli @@ -159,7 +246,7 @@ jobs: - name: Build and push 'dspace-cli' image id: docker_build_cli - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile.cli @@ -170,3 +257,167 @@ jobs: # Use tags / labels provided by 'docker/metadata-action' above tags: ${{ steps.meta_build_cli.outputs.tags }} labels: ${{ steps.meta_build_cli.outputs.labels }} + + ########################################### + # Build/Push the 'dspace/dspace-solr' image + ########################################### + dspace-solr: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + # Get Metadata for docker_build_solr step below + - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image + id: meta_build_solr + uses: docker/metadata-action@v4 + with: + images: dspace/dspace-solr + tags: ${{ env.IMAGE_TAGS }} + flavor: ${{ env.TAGS_FLAVOR }} + + - name: Build and push 'dspace-solr' image + id: docker_build_solr + uses: docker/build-push-action@v4 + with: + context: . + file: ./dspace/src/main/docker/dspace-solr/Dockerfile + platforms: ${{ env.PLATFORMS }} + # For pull requests, we run the Docker build (to ensure no PR changes break the build), + # but we ONLY do an image push to DockerHub if it's NOT a PR + push: ${{ github.event_name != 'pull_request' }} + # Use tags / labels provided by 'docker/metadata-action' above + tags: ${{ steps.meta_build_solr.outputs.tags }} + labels: ${{ steps.meta_build_solr.outputs.labels }} + + ########################################################### + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image + ########################################################### + dspace-postgres-pgcrypto: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + # Get Metadata for docker_build_postgres step below + - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image + id: meta_build_postgres + uses: docker/metadata-action@v4 + with: + images: dspace/dspace-postgres-pgcrypto + tags: ${{ env.IMAGE_TAGS }} + flavor: ${{ env.TAGS_FLAVOR }} + + - name: Build and push 'dspace-postgres-pgcrypto' image + id: docker_build_postgres + uses: docker/build-push-action@v4 + with: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ + dockerfile: Dockerfile + platforms: ${{ env.PLATFORMS }} + # For pull requests, we run the Docker build (to ensure no PR changes break the build), + # but we ONLY do an image push to DockerHub if it's NOT a PR + push: ${{ github.event_name != 'pull_request' }} + # Use tags / labels provided by 'docker/metadata-action' above + tags: ${{ steps.meta_build_postgres.outputs.tags }} + labels: ${{ steps.meta_build_postgres.outputs.labels }} + + ######################################################################## + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql 
tag) + ######################################################################## + dspace-postgres-pgcrypto-loadsql: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + # Get Metadata for docker_build_postgres_loadsql step below + - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image + id: meta_build_postgres_loadsql + uses: docker/metadata-action@v4 + with: + images: dspace/dspace-postgres-pgcrypto + tags: ${{ env.IMAGE_TAGS }} + # Suffix all tags with "-loadsql". Otherwise, it uses the same + # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above. 
+ flavor: ${{ env.TAGS_FLAVOR }} + suffix=-loadsql + + - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image + id: docker_build_postgres_loadsql + uses: docker/build-push-action@v4 + with: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ + dockerfile: Dockerfile + platforms: ${{ env.PLATFORMS }} + # For pull requests, we run the Docker build (to ensure no PR changes break the build), + # but we ONLY do an image push to DockerHub if it's NOT a PR + push: ${{ github.event_name != 'pull_request' }} + # Use tags / labels provided by 'docker/metadata-action' above + tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }} + labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }} \ No newline at end of file diff --git a/.github/workflows/issue_opened.yml b/.github/workflows/issue_opened.yml index 5d7c1c30f7d3..b4436dca3aad 100644 --- a/.github/workflows/issue_opened.yml +++ b/.github/workflows/issue_opened.yml @@ -16,7 +16,7 @@ jobs: # Only add to project board if issue is flagged as "needs triage" or has no labels # NOTE: By default we flag new issues as "needs triage" in our issue template if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '') - uses: actions/add-to-project@v0.3.0 + uses: actions/add-to-project@v0.5.0 # Note, the authentication token below is an ORG level Secret. 
# It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml index d71d244c2b02..a023f4eef246 100644 --- a/.github/workflows/label_merge_conflicts.yml +++ b/.github/workflows/label_merge_conflicts.yml @@ -1,11 +1,12 @@ # This workflow checks open PRs for merge conflicts and labels them when conflicts are found name: Check for merge conflicts -# Run whenever the "main" branch is updated -# NOTE: This means merge conflicts are only checked for when a PR is merged to main. +# Run this for all pushes (i.e. merges) to 'main' or maintenance branches on: push: - branches: [ main ] + branches: + - main + - 'dspace-**' # So that the `conflict_label_name` is removed if conflicts are resolved, # we allow this to run for `pull_request_target` so that github secrets are available. pull_request_target: @@ -23,7 +24,9 @@ jobs: steps: # See: https://github.com/prince-chrismc/label-merge-conflicts-action - name: Auto-label PRs with merge conflicts - uses: prince-chrismc/label-merge-conflicts-action@v2 + uses: prince-chrismc/label-merge-conflicts-action@v3 + # Ignore any failures -- may occur (randomly?) for older, outdated PRs. + continue-on-error: true # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved. 
# Note, the authentication token is created automatically # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token diff --git a/.github/workflows/port_merged_pull_request.yml b/.github/workflows/port_merged_pull_request.yml new file mode 100644 index 000000000000..109835d14d3c --- /dev/null +++ b/.github/workflows/port_merged_pull_request.yml @@ -0,0 +1,46 @@ +# This workflow will attempt to port a merged pull request to +# the branch specified in a "port to" label (if exists) +name: Port merged Pull Request + +# Only run for merged PRs against the "main" or maintenance branches +# We allow this to run for `pull_request_target` so that github secrets are available +# (This is required when the PR comes from a forked repo) +on: + pull_request_target: + types: [ closed ] + branches: + - main + - 'dspace-**' + +permissions: + contents: write # so action can add comments + pull-requests: write # so action can create pull requests + +jobs: + port_pr: + runs-on: ubuntu-latest + # Don't run on closed *unmerged* pull requests + if: github.event.pull_request.merged + steps: + # Checkout code + - uses: actions/checkout@v3 + # Port PR to other branch (ONLY if labeled with "port to") + # See https://github.com/korthout/backport-action + - name: Create backport pull requests + uses: korthout/backport-action@v1 + with: + # Trigger based on a "port to [branch]" label on PR + # (This label must specify the branch name to port to) + label_pattern: '^port to ([^ ]+)$' + # Title to add to the (newly created) port PR + pull_title: '[Port ${target_branch}] ${pull_title}' + # Description to add to the (newly created) port PR + pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.' + # Copy all labels from original PR to (newly created) port PR + # NOTE: The labels matching 'label_pattern' are automatically excluded + copy_labels_pattern: '.*' + # Skip any merge commits in the ported PR. 
This means only non-merge commits are cherry-picked to the new PR + merge_commits: 'skip' + # Use a personal access token (PAT) to create PR as 'dspace-bot' user. + # A PAT is required in order for the new PR to trigger its own actions (for CI checks) + github_token: ${{ secrets.PR_PORT_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/pull_request_opened.yml b/.github/workflows/pull_request_opened.yml new file mode 100644 index 000000000000..9b61af72d187 --- /dev/null +++ b/.github/workflows/pull_request_opened.yml @@ -0,0 +1,24 @@ +# This workflow runs whenever a new pull request is created +name: Pull Request opened + +# Only run for newly opened PRs against the "main" or maintenance branches +# We allow this to run for `pull_request_target` so that github secrets are available +# (This is required to assign a PR back to the creator when the PR comes from a forked repo) +on: + pull_request_target: + types: [ opened ] + branches: + - main + - 'dspace-**' + +permissions: + pull-requests: write + +jobs: + automation: + runs-on: ubuntu-latest + steps: + # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards + # See https://github.com/toshimaru/auto-author-assign + - name: Assign PR to creator + uses: toshimaru/auto-author-assign@v1.6.2 diff --git a/Dockerfile b/Dockerfile index 444a1bcf0b55..9c32ecb50cd4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,7 @@ ARG TARGET_DIR=dspace-installer COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.12 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH # Need wget to install ant @@ -50,7 +50,7 @@ RUN ant init_installation update_configs update_code update_webapps FROM tomcat:9-jdk${JDK_VERSION} # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. 
ENV DSPACE_INSTALL=/dspace -# Copy the /dspace directory from 'ant_build' containger to /dspace in this container +# Copy the /dspace directory from 'ant_build' container to /dspace in this container COPY --from=ant_build /dspace $DSPACE_INSTALL # Expose Tomcat port and AJP port EXPOSE 8080 8009 diff --git a/Dockerfile.cli b/Dockerfile.cli index 76e559fc83c3..62e83b79ef02 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -30,12 +30,12 @@ ARG TARGET_DIR=dspace-installer COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.12 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH -# Need wget to install ant +# Need wget to install ant, and unzip for managing AIPs RUN apt-get update \ - && apt-get install -y --no-install-recommends wget \ + && apt-get install -y --no-install-recommends wget unzip \ && apt-get purge -y --auto-remove \ && rm -rf /var/lib/apt/lists/* # Download and install 'ant' diff --git a/LICENSES_THIRD_PARTY b/LICENSES_THIRD_PARTY index b96ea77648a6..e494c80c5d6e 100644 --- a/LICENSES_THIRD_PARTY +++ b/LICENSES_THIRD_PARTY @@ -26,7 +26,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava) * JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava) * HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc) - * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/) + * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.18.0 - https://drewnoakes.com/code/exif/) * parso (com.epam:parso:2.0.14 - https://github.com/epam/parso) * Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java) * ClassMate 
(com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate) @@ -34,12 +34,12 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core) * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.2 - http://github.com/FasterXML/jackson) * Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary) - * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary) + * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.3 - http://github.com/FasterXML/jackson-dataformats-binary) * Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text) - * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.5 - 
https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) * Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator) * Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox) * zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/) @@ -56,19 +56,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics) * FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/) * Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson) - * error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) + * error-prone annotations (com.google.errorprone:error_prone_annotations:2.18.0 - https://errorprone.info/error_prone_annotations) * Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) - * Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava) + * Guava: Google Core Libraries for Java (com.google.guava:guava:32.0.0-jre - https://github.com/google/guava) * Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5) * Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - 
https://github.com/google/guava/listenablefuture) * Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client) * GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson) * Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2) - * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) + * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/) * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client) * ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap) * libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/) - * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io) + * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.2 - https://jackcess.sourceforge.io) * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net) * project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath) * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath) @@ -79,11 +79,20 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt) * opencsv (com.opencsv:opencsv:5.6 - 
http://opencsv.sf.net) * java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst) - * rome (com.rometools:rome:1.18.0 - http://rometools.com/rome) - * rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules) - * rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils) + * rome (com.rometools:rome:1.19.0 - http://rometools.com/rome) + * rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules) + * rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils) * fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net) * T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest) + * config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config) + * ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config) + * akka-actor (com.typesafe.akka:akka-actor_2.13:2.5.31 - https://akka.io/) + * akka-http-core (com.typesafe.akka:akka-http-core_2.13:10.1.12 - https://akka.io) + * akka-http (com.typesafe.akka:akka-http_2.13:10.1.12 - https://akka.io) + * akka-parsing (com.typesafe.akka:akka-parsing_2.13:10.1.12 - https://akka.io) + * akka-protobuf (com.typesafe.akka:akka-protobuf_2.13:2.5.31 - https://akka.io/) + * akka-stream (com.typesafe.akka:akka-stream_2.13:2.5.31 - https://akka.io/) + * scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging) * JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) * SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet) * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/) @@ -91,20 +100,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache Commons Codec 
(commons-codec:commons-codec:1.10 - http://commons.apache.org/proper/commons-codec/) * Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/) * Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/) - * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/) + * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.5 - https://commons.apache.org/proper/commons-fileupload/) * Apache Commons IO (commons-io:commons-io:2.7 - https://commons.apache.org/proper/commons-io/) * Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/) * Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/) * Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/) * GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson) - * Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/) * OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu) * Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core) * Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite) * Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9) * Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx) * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm) - * micrometer-core 
(io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer) + * micrometer-core (io.micrometer:micrometer-core:1.9.11 - https://github.com/micrometer-metrics/micrometer) * Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/) * Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/) * Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/) @@ -188,88 +196,87 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util) * Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1) * Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix) - * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) - * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/) - * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/) - * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/) - * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) - * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) - * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/) - * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) - * Lucene ICU Analysis Components 
(org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) - * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) - * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) - * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) - * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) - * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) - * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) - * Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification) - * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs) - * Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core) - * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions) - * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping) - * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter) - * Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join) - * Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory) - * 
Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc) - * Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries) - * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser) - * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox) - * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) - * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) - * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest) - * Apache FontBox (org.apache.pdfbox:fontbox:2.0.27 - http://pdfbox.apache.org/) - * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/) - * Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/) - * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox/) - * Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/) - * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) - * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/) - * Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/) - * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/) - * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/) - * Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/) - * Apache Solr Core (org.apache.solr:solr-core:8.11.1 
- https://lucene.apache.org/solr-parent/solr-core) - * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj) + * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) + * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-api/) + * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-core/) + * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-jul/) + * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) + * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) + * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-web/) + * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) + * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) + * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) + * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) + * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) + * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.2 - 
https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) + * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) + * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) + * Lucene Classification (org.apache.lucene:lucene-classification:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-classification) + * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-codecs) + * Lucene Core (org.apache.lucene:lucene-core:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-core) + * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-expressions) + * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-grouping) + * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-highlighter) + * Lucene Join (org.apache.lucene:lucene-join:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-join) + * Lucene Memory (org.apache.lucene:lucene-memory:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-memory) + * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-misc) + * Lucene Queries (org.apache.lucene:lucene-queries:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queries) + * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queryparser) + * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-sandbox) + * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) + * Lucene Spatial 3D 
(org.apache.lucene:lucene-spatial3d:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) + * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-suggest) + * Apache FontBox (org.apache.pdfbox:fontbox:2.0.28 - http://pdfbox.apache.org/) + * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/) + * Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/) + * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.28 - https://www.apache.org/pdfbox-parent/pdfbox/) + * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) + * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.27 - https://www.apache.org/pdfbox-parent/xmpbox/) + * Apache POI - Common (org.apache.poi:poi:5.2.3 - https://poi.apache.org/) + * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-scratchpad:5.2.3 - https://poi.apache.org/) + * Apache Solr Core (org.apache.solr:solr-core:8.11.2 - https://lucene.apache.org/solr-parent/solr-core) + * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.2 - https://lucene.apache.org/solr-parent/solr-solrj) * Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl) * Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec) * Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org) - * Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/) - * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - 
https://tika.apache.org/tika-parser-apple-module/) - * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/) - * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/) - * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/) - * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/) - * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/) - * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/) - * Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/) - * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/) - * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/) - * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/) - * Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/) - * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/) - * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/) - * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/) - * 
Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/) - * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/) - * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/) - * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/) - * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/) - * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/) - * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/) - * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) - * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/) - * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/) - * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/) + * Apache Tika core (org.apache.tika:tika-core:2.5.0 - https://tika.apache.org/) + * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.5.0 - https://tika.apache.org/tika-parser-apple-module/) + * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.5.0 - https://tika.apache.org/tika-parser-audiovideo-module/) + * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.5.0 - https://tika.apache.org/tika-parser-cad-module/) + * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.5.0 - 
https://tika.apache.org/tika-parser-code-module/) + * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.5.0 - https://tika.apache.org/tika-parser-crypto-module/) + * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.5.0 - https://tika.apache.org/tika-parser-digest-commons/) + * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.5.0 - https://tika.apache.org/tika-parser-font-module/) + * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.5.0 - https://tika.apache.org/tika-parser-html-module/) + * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.5.0 - https://tika.apache.org/tika-parser-image-module/) + * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.5.0 - https://tika.apache.org/tika-parser-mail-commons/) + * Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.5.0 - https://tika.apache.org/tika-parser-mail-module/) + * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.5.0 - https://tika.apache.org/tika-parser-microsoft-module/) + * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.5.0 - https://tika.apache.org/tika-parser-miscoffice-module/) + * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.5.0 - https://tika.apache.org/tika-parser-news-module/) + * Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.5.0 - https://tika.apache.org/tika-parser-ocr-module/) + * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.5.0 - https://tika.apache.org/tika-parser-pdf-module/) + * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.5.0 - https://tika.apache.org/tika-parser-pkg-module/) + * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.5.0 - https://tika.apache.org/tika-parser-text-module/) + * Apache Tika WARC 
parser module (org.apache.tika:tika-parser-webarchive-module:2.5.0 - https://tika.apache.org/tika-parser-webarchive-module/) + * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.5.0 - https://tika.apache.org/tika-parser-xml-module/) + * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.5.0 - https://tika.apache.org/tika-parser-xmp-commons/) + * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.5.0 - https://tika.apache.org/tika-parser-zip-commons/) + * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.5.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) + * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.75 - https://tomcat.apache.org/) + * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.75 - https://tomcat.apache.org/) + * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.75 - https://tomcat.apache.org/) * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/) * Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/) * Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/) * Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/) - * XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/) + * XmlBeans (org.apache.xmlbeans:xmlbeans:5.1.1 - https://xmlbeans.apache.org/) * Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper) * Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute) * org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - 
https://github.com/apiguardian-team/apiguardian) - * AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/) + * AssertJ fluent assertions (org.assertj:assertj-core:3.22.0 - https://assertj.github.io/doc/assertj-core/) * Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector) * jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/) * TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/) @@ -279,34 +286,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Asynchronous HTTP Client 
(org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 
- https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: Common 
(org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org) * flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core) @@ -315,8 +322,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator) - * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.2.5.Final - http://hibernate.org/validator/hibernate-validator) + * Hibernate Validator Portable Extension 
(org.hibernate.validator:hibernate-validator-cdi:6.2.5.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * leveldb (org.iq80.leveldb:leveldb:0.12 - http://github.com/dain/leveldb/leveldb) + * leveldb-api (org.iq80.leveldb:leveldb-api:0.12 - http://github.com/dain/leveldb/leveldb-api) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex) * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org) @@ -337,59 +346,67 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester) * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util) * Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api) + * jwarc (org.netpreserve:jwarc:0.19.0 - https://github.com/iipc/jwarc) * Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis) * parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org) * parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org) * RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/) - * JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) - * Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Core 
(org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework) - * spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) + * Scala Library (org.scala-lang:scala-library:2.13.9 - https://www.scala-lang.org/) + * Scala Compiler (org.scala-lang:scala-reflect:2.13.0 - https://www.scala-lang.org/) + * scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.13:2.1.6 - http://www.scala-lang.org/) + * scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.13:0.9.0 - http://www.scala-lang.org/) + * scala-parser-combinators 
(org.scala-lang.modules:scala-parser-combinators_2.13:1.1.2 - http://www.scala-lang.org/) + * scala-xml (org.scala-lang.modules:scala-xml_2.13:1.3.0 - http://www.scala-lang.org/) + * JSONassert (org.skyscreamer:jsonassert:1.5.1 - https://github.com/skyscreamer/JSONassert) + * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.36 - http://www.slf4j.org) + * Spring AOP (org.springframework:spring-aop:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Beans (org.springframework:spring-beans:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context (org.springframework:spring-context:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context Support (org.springframework:spring-context-support:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Core (org.springframework:spring-core:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring JDBC (org.springframework:spring-jdbc:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring TestContext Framework (org.springframework:spring-test:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Transaction (org.springframework:spring-tx:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Web (org.springframework:spring-web:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Web MVC (org.springframework:spring-webmvc:5.3.27 - https://github.com/spring-projects/spring-framework) + * spring-boot (org.springframework.boot:spring-boot:2.7.12 - 
https://spring.io/projects/spring-boot) + * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) * Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor) - * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot) - * 
spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) - * Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons) - * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) - * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) - * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas) + * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.7.12 - 
https://spring.io/projects/spring-boot) + * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-test (org.springframework.boot:spring-boot-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * Spring Data Core (org.springframework.data:spring-data-commons:2.7.12 - https://www.spring.io/spring-data/spring-data-commons) + * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) + * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) + * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.5.4 - https://github.com/spring-projects/spring-hateoas) * Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core) - * spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-web 
(org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security) + * spring-security-config (org.springframework.security:spring-security-config:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-core (org.springframework.security:spring-security-core:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-crypto (org.springframework.security:spring-security-crypto:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-test (org.springframework.security:spring-security-test:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-web (org.springframework.security:spring-security-web:5.7.8 - https://spring.io/projects/spring-security) * SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/) * snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java) * xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/) * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/) - * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/) + * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.9.1 - https://www.xmlunit.org/) * org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/) - * SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org) + * SnakeYAML (org.yaml:snakeyaml:1.30 - https://bitbucket.org/snakeyaml/snakeyaml) * software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/) * Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/) * xalan (xalan:xalan:2.7.0 - no url defined) @@ -404,7 +421,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security) * Java 
Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core) * JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/) - * curvesapi (com.github.virtuald:curvesapi:1.06 - https://github.com/virtuald/curvesapi) + * curvesapi (com.github.virtuald:curvesapi:1.07 - https://github.com/virtuald/curvesapi) * Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/) * JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/) * dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org) @@ -426,11 +443,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/) * asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/) * asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/) - * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.3 - https://jdbc.postgresql.org) + * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.6.0 - https://jdbc.postgresql.org) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio) * XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/) + CC0: + + * reactive-streams (org.reactivestreams:reactive-streams:1.0.2 - http://www.reactive-streams.org/) + Common Development and Distribution License (CDDL): * istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/) @@ -446,7 +467,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net) * javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net) * 
jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) + * JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) * HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils) @@ -489,34 +510,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations 
(org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 
- https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - 
https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) @@ -542,10 +563,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) * FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) - * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm) - * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm) - * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm) + * JHighlight 
(org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) + * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.15.Final - https://hibernate.org/orm) * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org) * im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) @@ -562,9 +583,11 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines MIT License: + * better-files (com.github.pathikrit:better-files_2.13:3.9.1 - https://github.com/pathikrit/better-files) * Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver) - * dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist) + * dd-plist (com.googlecode.plist:dd-plist:1.25 - http://www.github.com/3breadt/dd-plist) * DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis) + * s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock) * JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple) * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html) @@ -572,15 +595,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * org.brotli:dec (org.brotli:dec:0.1.2 - 
http://brotli.org/dec) * Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org) - * Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org) + * Checker Qual (org.checkerframework:checker-qual:3.31.0 - https://checkerframework.org) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito) * mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito) * ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model) - * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.25 - http://www.slf4j.org) - * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org) - * SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org) + * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.36 - http://www.slf4j.org) + * SLF4J API Module (org.slf4j:slf4j-api:1.7.36 - http://www.slf4j.org) * SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org) * HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org) * toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org) @@ -589,7 +611,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org) * urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org) * bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org) - * core-js (org.webjars.npm:core-js:3.28.0 - https://www.webjars.org) + * core-js (org.webjars.npm:core-js:3.30.1 - https://www.webjars.org) * @json-editor/json-editor 
(org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org) Mozilla Public License: @@ -606,17 +628,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) + * JSON in Java (org.json:json:20230227 - https://github.com/douglascrockford/JSON-java) * LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html) - The JSON License: - - * JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java) - UnRar License: - * Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar) + * Java Unrar (com.github.junrar:junrar:7.5.3 - https://github.com/junrar/junrar) Unicode/ICU License: diff --git a/docker-compose.yml b/docker-compose.yml index 6008b873ae5f..6c1615040722 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,6 +28,7 @@ services: # proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above. proxies__P__trusted__P__ipranges: '172.23.0' + LOGGING_CONFIG: /dspace/config/log4j2-container.xml image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}" build: context: . @@ -62,13 +63,17 @@ services: while (! 
/dev/null 2>&1; do sleep 1; done; /dspace/bin/dspace database migrate catalina.sh run - # DSpace database container + # DSpace PostgreSQL database container dspacedb: container_name: dspacedb + # Uses a custom Postgres image with pgcrypto installed + image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-dspace-7_x}" + build: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ environment: PGDATA: /pgdata - # Uses a custom Postgres image with pgcrypto installed - image: dspace/dspace-postgres-pgcrypto + POSTGRES_PASSWORD: dspace networks: dspacenet: ports: @@ -77,12 +82,17 @@ services: stdin_open: true tty: true volumes: + # Keep Postgres data directory between reboots - pgdata:/pgdata # DSpace Solr container dspacesolr: container_name: dspacesolr - # Uses official Solr image at https://hub.docker.com/_/solr/ - image: solr:8.11-slim + image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-dspace-7_x}" + build: + context: . + dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile + args: + SOLR_VERSION: "${SOLR_VER:-8.11}" networks: dspacenet: ports: @@ -92,30 +102,25 @@ services: tty: true working_dir: /var/solr/data volumes: - # Mount our local Solr core configs so that they are available as Solr configsets on container - - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority - - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai - - ./dspace/solr/search:/opt/solr/server/solr/configsets/search - - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics # Keep Solr data directory between reboots - solr_data:/var/solr/data - # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr + # Initialize all DSpace Solr cores then start Solr: # * First, run precreate-core to create the core (if it doesn't yet exist). 
If exists already, this is a no-op - # * Second, copy updated configs from mounted configsets to this core. If it already existed, this updates core - # to the latest configs. If it's a newly created core, this is a no-op. + # * Second, copy configsets to this core: + # Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr` entrypoint: - /bin/bash - '-c' - | init-var-solr precreate-core authority /opt/solr/server/solr/configsets/authority - cp -r -u /opt/solr/server/solr/configsets/authority/* authority + cp -r /opt/solr/server/solr/configsets/authority/* authority precreate-core oai /opt/solr/server/solr/configsets/oai - cp -r -u /opt/solr/server/solr/configsets/oai/* oai + cp -r /opt/solr/server/solr/configsets/oai/* oai precreate-core search /opt/solr/server/solr/configsets/search - cp -r -u /opt/solr/server/solr/configsets/search/* search + cp -r /opt/solr/server/solr/configsets/search/* search precreate-core statistics /opt/solr/server/solr/configsets/statistics - cp -r -u /opt/solr/server/solr/configsets/statistics/* statistics + cp -r /opt/solr/server/solr/configsets/statistics/* statistics exec solr -f volumes: assetstore: diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index c70f5a2a6370..a7d1fa104edd 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. 
@@ -102,7 +102,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.0.0 + 3.4.0 validate @@ -116,7 +116,10 @@ org.codehaus.mojo buildnumber-maven-plugin - 1.4 + 3.2.0 + + UNKNOWN_REVISION + validate @@ -784,7 +787,7 @@ org.json json - 20180130 + 20231013 @@ -896,7 +899,7 @@ - + io.findify s3mock_2.13 @@ -913,38 +916,43 @@ - + - io.netty netty-buffer - 4.1.68.Final + 4.1.94.Final io.netty netty-transport - 4.1.68.Final + 4.1.94.Final + + io.netty + netty-transport-native-unix-common + 4.1.94.Final + io.netty netty-common - 4.1.68.Final + 4.1.94.Final io.netty netty-handler - 4.1.68.Final + 4.1.94.Final io.netty netty-codec - 4.1.68.Final + 4.1.94.Final org.apache.velocity @@ -980,7 +988,8 @@ org.scala-lang scala-library - 2.13.2 + 2.13.9 + test diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java index 1cacbf6aedf6..2d782dc3b82a 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java @@ -22,9 +22,21 @@ public interface AccessStatusHelper { * * @param context the DSpace context * @param item the item + * @param threshold the embargo threshold date * @return an access status value * @throws SQLException An exception that provides information on a database access error or other errors. */ public String getAccessStatusFromItem(Context context, Item item, Date threshold) throws SQLException; + + /** + * Retrieve embargo information for the item + * + * @param context the DSpace context + * @param item the item to check for embargo information + * @param threshold the embargo threshold date + * @return an embargo date + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + public String getEmbargoFromItem(Context context, Item item, Date threshold) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java index 544dc99cb4dd..e1f11285d840 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java @@ -63,4 +63,9 @@ public void init() throws Exception { public String getAccessStatus(Context context, Item item) throws SQLException { return helper.getAccessStatusFromItem(context, item, forever_date); } + + @Override + public String getEmbargoFromItem(Context context, Item item) throws SQLException { + return helper.getEmbargoFromItem(context, item, forever_date); + } } diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java index a67fa67af3b9..5f0e6d8b259b 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -26,6 +26,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.Group; +import org.joda.time.LocalDate; /** * Default plugin implementation of the access status helper. @@ -33,6 +34,11 @@ * calculate the access status of an item based on the policies of * the primary or the first bitstream in the original bundle. * Users can override this method for enhanced functionality. + * + * The getEmbargoInformationFromItem method provides a simple logic to + * * retrieve embargo information of bitstreams from an item based on the policies of + * * the primary or the first bitstream in the original bundle. + * * Users can override this method for enhanced functionality. 
*/ public class DefaultAccessStatusHelper implements AccessStatusHelper { public static final String EMBARGO = "embargo"; @@ -54,12 +60,12 @@ public DefaultAccessStatusHelper() { /** * Look at the item's policies to determine an access status value. - * It is also considering a date threshold for embargos and restrictions. + * It is also considering a date threshold for embargoes and restrictions. * * If the item is null, simply returns the "unknown" value. * * @param context the DSpace context - * @param item the item to embargo + * @param item the item to check for embargoes * @param threshold the embargo threshold date * @return an access status value */ @@ -86,7 +92,7 @@ public String getAccessStatusFromItem(Context context, Item item, Date threshold .findFirst() .orElse(null); } - return caculateAccessStatusForDso(context, bitstream, threshold); + return calculateAccessStatusForDso(context, bitstream, threshold); } /** @@ -104,7 +110,7 @@ public String getAccessStatusFromItem(Context context, Item item, Date threshold * @param threshold the embargo threshold date * @return an access status value */ - private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) + private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) throws SQLException { if (dso == null) { return METADATA_ONLY; @@ -156,4 +162,87 @@ private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Dat } return RESTRICTED; } + + /** + * Look at the policies of the primary (or first) bitstream of the item to retrieve its embargo. + * + * If the item is null, simply returns an empty map with no embargo information. 
+ * + * @param context the DSpace context + * @param item the item to embargo + * @return an access status value + */ + @Override + public String getEmbargoFromItem(Context context, Item item, Date threshold) + throws SQLException { + Date embargoDate; + + // If Item status is not "embargo" then return a null embargo date. + String accessStatus = getAccessStatusFromItem(context, item, threshold); + + if (item == null || !accessStatus.equals(EMBARGO)) { + return null; + } + // Consider only the original bundles. + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + // Check for primary bitstreams first. + Bitstream bitstream = bundles.stream() + .map(bundle -> bundle.getPrimaryBitstream()) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + if (bitstream == null) { + // If there is no primary bitstream, + // take the first bitstream in the bundles. + bitstream = bundles.stream() + .map(bundle -> bundle.getBitstreams()) + .flatMap(List::stream) + .findFirst() + .orElse(null); + } + + if (bitstream == null) { + return null; + } + + embargoDate = this.retrieveShortestEmbargo(context, bitstream); + + return embargoDate != null ? embargoDate.toString() : null; + } + + /** + * + */ + private Date retrieveShortestEmbargo(Context context, Bitstream bitstream) throws SQLException { + Date embargoDate = null; + // Only consider read policies. + List policies = authorizeService + .getPoliciesActionFilter(context, bitstream, Constants.READ); + + // Looks at all read policies. + for (ResourcePolicy policy : policies) { + boolean isValid = resourcePolicyService.isDateValid(policy); + Group group = policy.getGroup(); + + if (group != null && StringUtils.equals(group.getName(), Group.ANONYMOUS)) { + // Only calculate the status for the anonymous group. 
+ if (!isValid) { + // If the policy is not valid there is an active embargo + Date startDate = policy.getStartDate(); + + if (startDate != null && !startDate.before(LocalDate.now().toDate())) { + // There is an active embargo: aim to take the shortest embargo (account for rare cases where + // more than one resource policy exists) + if (embargoDate == null) { + embargoDate = startDate; + } else { + embargoDate = startDate.before(embargoDate) ? startDate : embargoDate; + } + } + } + } + } + + return embargoDate; + } } diff --git a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java index 43de5e3c47f1..2ed47bde4cd2 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java +++ b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java @@ -40,7 +40,18 @@ public interface AccessStatusService { * * @param context the DSpace context * @param item the item + * @return an access status value * @throws SQLException An exception that provides information on a database access error or other errors. */ public String getAccessStatus(Context context, Item item) throws SQLException; + + /** + * Retrieve embargo information for the item + * + * @param context the DSpace context + * @param item the item to check for embargo information + * @return an embargo date + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + public String getEmbargoFromItem(Context context, Item item) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java index 2677cb20501f..7a1aaa782787 100644 --- a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java @@ -7,8 +7,12 @@ */ package org.dspace.administer; +import java.io.File; import java.io.IOException; import java.sql.SQLException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.xpath.XPath; @@ -30,6 +34,8 @@ import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; @@ -61,10 +67,18 @@ * } */ public class MetadataImporter { + public static final String BASE = DSpaceServicesFactory.getInstance() + .getConfigurationService().getProperty("dspace.dir") + File.separator + "config" + File.separator + + "registries" + File.separator; + public static final String REGISTRY_METADATA_PROPERTY = "registry.metadata.load"; + public static final String REGISTRY_BITSTREAM_FORMAT_PROPERTY = "registry.bitstream-formats.load"; + protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance() .getMetadataSchemaService(); protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance() .getMetadataFieldService(); + protected static ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); /** * logging 
category @@ -100,18 +114,35 @@ public static void main(String[] args) Options options = new Options(); options.addOption("f", "file", true, "source xml file for DC fields"); options.addOption("u", "update", false, "update an existing schema"); + options.addOption("h", "help", false, "help message"); CommandLine line = parser.parse(options, args); - if (line.hasOption('f')) { + if (line.hasOption('h')) { + usage(); + System.exit(1); + } else if (line.hasOption('f')) { String file = line.getOptionValue('f'); boolean forceUpdate = line.hasOption('u'); loadRegistry(file, forceUpdate); } else { - usage(); - System.exit(1); + boolean forceUpdate = line.hasOption('u'); + for (String file : getAllRegistryFiles(REGISTRY_METADATA_PROPERTY)) { + loadRegistry(file, forceUpdate); + } } } + /** + * Load all registry file names from config + * + * @param propertyName name of the property that used in config + * @return list of all registry files + */ + public static List getAllRegistryFiles(String propertyName) { + List files = Arrays.asList(configurationService.getArrayProperty(propertyName)); + return files.stream().map(file -> BASE + file).collect(Collectors.toList()); + } + /** * Load the data from the specified file path into the database * @@ -285,7 +316,10 @@ private static void loadType(Context context, Node node) public static void usage() { String usage = "Use this class with the following option:\n" + " -f : specify which xml source file " + - "contains the DC fields to import.\n"; + "contains the DC fields to import.\n" + + "If you use the script without the -f parameter, then all" + + " registries will be loaded from the config/registries folder\n"; + System.out.println(usage); } } diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java index 8d189038d9d1..91dcfb5dfec5 100644 --- 
a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.administer; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script. */ public class ProcessCleanerConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java index bbf320a0d5e5..37876c587ee7 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java @@ -33,6 +33,8 @@ import org.w3c.dom.NodeList; import org.xml.sax.SAXException; + + /** * Loads the bitstream format and Dublin Core type registries into the database. * Intended for use as a command-line tool. 
@@ -68,7 +70,7 @@ private RegistryLoader() { } */ public static void main(String[] argv) throws Exception { String usage = "Usage: " + RegistryLoader.class.getName() - + " (-bitstream | -metadata) registry-file.xml"; + + " (-bitstream | -metadata | -all) registry-file.xml"; Context context = null; @@ -81,10 +83,21 @@ public static void main(String[] argv) throws Exception { // Work out what we're loading if (argv[0].equalsIgnoreCase("-bitstream")) { - RegistryLoader.loadBitstreamFormats(context, argv[1]); + if (argv.length == 1) { + loadAllBitstreamFormats(context); + } else { + RegistryLoader.loadBitstreamFormats(context, argv[1]); + } } else if (argv[0].equalsIgnoreCase("-metadata")) { // Call MetadataImporter, as it handles Metadata schema updates - MetadataImporter.loadRegistry(argv[1], true); + if (argv.length == 1) { + loadAllRegistry(); + } else { + MetadataImporter.loadRegistry(argv[1], true); + } + } else if (argv[0].equalsIgnoreCase("-all")) { + loadAllBitstreamFormats(context); + loadAllRegistry(); } else { System.err.println(usage); } @@ -111,6 +124,30 @@ public static void main(String[] argv) throws Exception { } } + + /** + * Load all bitstream formats from configuration properties + * + * @param context DSpace context object + * @throws Exception + */ + private static void loadAllBitstreamFormats(Context context) throws Exception { + for (String file : MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_BITSTREAM_FORMAT_PROPERTY)) { + RegistryLoader.loadBitstreamFormats(context, file); + } + } + + /** + * Load all metadata registry from configuration properties + * + * @throws Exception + */ + private static void loadAllRegistry() throws Exception { + for (String file : MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_METADATA_PROPERTY)) { + MetadataImporter.loadRegistry(file, true); + } + } + /** * Load Bitstream Format metadata * diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java 
b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java new file mode 100644 index 000000000000..7bef232f0450 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -0,0 +1,689 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static org.apache.commons.collections4.CollectionUtils.isEmpty; +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; +import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.TimeZone; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException; +import org.dspace.app.bulkaccesscontrol.model.AccessCondition; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput; +import 
org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.model.AccessConditionOption; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControl extends DSpaceRunnable> { + + private DSpaceObjectUtils dSpaceObjectUtils; + + private SearchService searchService; + + private ItemService itemService; + + private String filename; + + private List uuids; + + private Context context; + + private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService; + + private ResourcePolicyService resourcePolicyService; + + protected EPersonService epersonService; + + private ConfigurationService configurationService; + + private MediaFilterService mediaFilterService; + + private Map itemAccessConditions; + + private Map uploadAccessConditions; + + private final String ADD_MODE = "add"; + + private final String REPLACE_MODE = "replace"; + + private boolean help = false; + + protected String eperson = null; + + @Override + @SuppressWarnings("unchecked") + public void setup() throws ParseException { + + this.searchService = SearchUtils.getSearchService(); + this.itemService = ContentServiceFactory.getInstance().getItemService(); + this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + this.epersonService = EPersonServiceFactory.getInstance().getEPersonService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); + mediaFilterService.setLogHandler(handler); + this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName( + "bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class); + this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + BulkAccessConditionConfiguration bulkAccessConditionConfiguration = + 
bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration("default"); + + itemAccessConditions = bulkAccessConditionConfiguration + .getItemAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + uploadAccessConditions = bulkAccessConditionConfiguration + .getBitstreamAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + help = commandLine.hasOption('h'); + filename = commandLine.getOptionValue('f'); + uuids = commandLine.hasOption('u') ? Arrays.asList(commandLine.getOptionValues('u')) : null; + } + + @Override + public void internalRun() throws Exception { + + if (help) { + printHelp(); + return; + } + + ObjectMapper mapper = new ObjectMapper(); + mapper.setTimeZone(TimeZone.getTimeZone("UTC")); + BulkAccessControlInput accessControl; + context = new Context(Context.Mode.BATCH_EDIT); + setEPerson(context); + + if (!isAuthorized(context)) { + handler.logError("Current user is not eligible to execute script bulk-access-control"); + throw new AuthorizeException("Current user is not eligible to execute script bulk-access-control"); + } + + if (uuids == null || uuids.size() == 0) { + handler.logError("A target uuid must be provided with at least on uuid (run with -h flag for details)"); + throw new IllegalArgumentException("At least one target uuid must be provided"); + } + + InputStream inputStream = handler.getFileStream(context, filename) + .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be " + + "found for filename: " + filename)); + + try { + accessControl = mapper.readValue(inputStream, BulkAccessControlInput.class); + } catch (IOException e) { + handler.logError("Error parsing json file " + e.getMessage()); + throw new IllegalArgumentException("Error parsing json file", e); + } + try { + validate(accessControl); + updateItemsAndBitstreamsPolices(accessControl); + 
context.complete(); + } catch (Exception e) { + handler.handleException(e); + context.abort(); + } + } + + /** + * check the validation of mapped json data, it must + * provide item or bitstream information or both of them + * and check the validation of item node if provided, + * and check the validation of bitstream node if provided. + * + * @param accessControl mapped json data + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if accessControl is invalid + */ + private void validate(BulkAccessControlInput accessControl) throws SQLException { + + AccessConditionItem item = accessControl.getItem(); + AccessConditionBitstream bitstream = accessControl.getBitstream(); + + if (Objects.isNull(item) && Objects.isNull(bitstream)) { + handler.logError("item or bitstream node must be provided"); + throw new BulkAccessControlException("item or bitstream node must be provided"); + } + + if (Objects.nonNull(item)) { + validateItemNode(item); + } + + if (Objects.nonNull(bitstream)) { + validateBitstreamNode(bitstream); + } + } + + /** + * check the validation of item node, the item mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information + * of accessCondition must be provided, + * also checking that accessConditions information are valid. 
+ * + * @param item the item node + * @throws BulkAccessControlException if item node is invalid + */ + private void validateItemNode(AccessConditionItem item) { + String mode = item.getMode(); + List accessConditions = item.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("item mode node must be provided"); + throw new BulkAccessControlException("item mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for item mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for item mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + } + + for (AccessCondition accessCondition : accessConditions) { + validateAccessCondition(accessCondition); + } + } + + /** + * check the validation of bitstream node, the bitstream mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information of accessConditions + * must be provided, + * also checking that constraint information is valid, + * also checking that accessConditions information are valid. 
+ * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if bitstream node is invalid + */ + private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException { + String mode = bitstream.getMode(); + List accessConditions = bitstream.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("bitstream mode node must be provided"); + throw new BulkAccessControlException("bitstream mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for bitstream mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + } + + validateConstraint(bitstream); + + for (AccessCondition accessCondition : bitstream.getAccessConditions()) { + validateAccessCondition(accessCondition); + } + } + + /** + * check the validation of constraint node if provided, + * constraint isn't supported when multiple uuids are provided + * or when uuid isn't an Item + * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if constraint node is invalid + */ + private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException { + if (uuids.size() > 1 && containsConstraints(bitstream)) { + handler.logError("constraint isn't supported when multiple uuids are provided"); + throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided"); + } else if (uuids.size() == 1 && containsConstraints(bitstream)) { + DSpaceObject dso 
= + dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(0))); + + if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) { + handler.logError("constraint is not supported when uuid isn't an Item"); + throw new BulkAccessControlException("constraint is not supported when uuid isn't an Item"); + } + } + } + + /** + * check the validation of access condition, + * the access condition name must equal to one of configured access conditions, + * then call {@link AccessConditionOption#validateResourcePolicy( + * Context, String, Date, Date)} if exception happens so, it's invalid. + * + * @param accessCondition the accessCondition + * @throws BulkAccessControlException if the accessCondition is invalid + */ + private void validateAccessCondition(AccessCondition accessCondition) { + + if (!itemAccessConditions.containsKey(accessCondition.getName())) { + handler.logError("wrong access condition <" + accessCondition.getName() + ">"); + throw new BulkAccessControlException("wrong access condition <" + accessCondition.getName() + ">"); + } + + try { + itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy( + context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate()); + } catch (Exception e) { + handler.logError("invalid access condition, " + e.getMessage()); + handler.handleException(e); + } + } + + /** + * find all items of provided {@link #uuids} from solr, + * then update the resource policies of items + * or bitstreams of items (only bitstreams of ORIGINAL bundles) + * and derivative bitstreams, or both of them. 
+ * + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws SearchServiceException if a search error occurs + * @throws AuthorizeException if an authorization error occurs + */ + private void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl) + throws SQLException, SearchServiceException, AuthorizeException { + + int counter = 0; + int start = 0; + int limit = 20; + + String query = buildSolrQuery(uuids); + + Iterator itemIterator = findItems(query, start, limit); + + while (itemIterator.hasNext()) { + + Item item = context.reloadEntity(itemIterator.next()); + + if (Objects.nonNull(accessControl.getItem())) { + updateItemPolicies(item, accessControl); + } + + if (Objects.nonNull(accessControl.getBitstream())) { + updateBitstreamsPolicies(item, accessControl); + } + + context.commit(); + context.uncacheEntity(item); + counter++; + + if (counter == limit) { + counter = 0; + start += limit; + itemIterator = findItems(query, start, limit); + } + } + } + + private String buildSolrQuery(List uuids) throws SQLException { + String [] query = new String[uuids.size()]; + + for (int i = 0 ; i < query.length ; i++) { + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(i))); + + if (dso.getType() == Constants.COMMUNITY) { + query[i] = "location.comm:" + dso.getID(); + } else if (dso.getType() == Constants.COLLECTION) { + query[i] = "location.coll:" + dso.getID(); + } else if (dso.getType() == Constants.ITEM) { + query[i] = "search.resourceid:" + dso.getID(); + } + } + return StringUtils.joinWith(" OR ", query); + } + + private Iterator findItems(String query, int start, int limit) + throws SearchServiceException { + + DiscoverQuery discoverQuery = buildDiscoveryQuery(query, start, limit); + + return searchService.search(context, discoverQuery) + .getIndexableObjects() + .stream() + .map(indexableObject -> + ((IndexableItem) 
indexableObject).getIndexedObject()) + .collect(Collectors.toList()) + .iterator(); + } + + private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) { + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setQuery(query); + discoverQuery.setStart(start); + discoverQuery.setMaxResults(limit); + + return discoverQuery; + } + + /** + * update the item resource policies, + * when mode equals to 'replace' will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void updateItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + AccessConditionItem acItem = accessControl.getItem(); + + if (REPLACE_MODE.equals(acItem.getMode())) { + removeReadPolicies(item, TYPE_CUSTOM); + removeReadPolicies(item, TYPE_INHERITED); + } + + setItemPolicies(item, accessControl); + logInfo(acItem.getAccessConditions(), acItem.getMode(), item); + } + + /** + * create the new resource policies of item. + * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust item's default policies. 
+ * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void setItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + accessControl + .getItem() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(item, accessCondition, + itemAccessConditions.get(accessCondition.getName()))); + + itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false); + } + + /** + * update the resource policies of all item's bitstreams + * or bitstreams specified into constraint node, + * and derivative bitstreams. + * + * NOTE: only bitstreams of ORIGINAL bundles + * + * @param item the item contains bitstreams + * @param accessControl the access control input + */ + private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) { + AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints(); + + // look over all the bundles and force initialization of bitstreams collection + // to avoid lazy initialization exception + long count = item.getBundles() + .stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .count(); + + item.getBundles(CONTENT_BUNDLE_NAME).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> constraints == null || + constraints.getUuid() == null || + constraints.getUuid().size() == 0 || + constraints.getUuid().contains(bitstream.getID().toString())) + .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); + } + + /** + * check that the bitstream node is existed, + * and contains constraint node, + * and constraint contains uuids. 
+ * + * @param bitstream the bitstream node + * @return true when uuids of constraint of bitstream is not empty, + * otherwise false + */ + private boolean containsConstraints(AccessConditionBitstream bitstream) { + return Objects.nonNull(bitstream) && + Objects.nonNull(bitstream.getConstraints()) && + isNotEmpty(bitstream.getConstraints().getUuid()); + } + + /** + * update the bitstream resource policies, + * when mode equals to replace will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws RuntimeException if something goes wrong in the database + * or an authorization error occurs + */ + private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) { + + AccessConditionBitstream acBitstream = accessControl.getBitstream(); + + if (REPLACE_MODE.equals(acBitstream.getMode())) { + removeReadPolicies(bitstream, TYPE_CUSTOM); + removeReadPolicies(bitstream, TYPE_INHERITED); + } + + try { + setBitstreamPolicies(bitstream, item, accessControl); + logInfo(acBitstream.getAccessConditions(), acBitstream.getMode(), bitstream); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + } + + /** + * remove dspace object's read policies. + * + * @param dso the dspace object + * @param type resource policy type + * @throws BulkAccessControlException if something goes wrong + * in the database or an authorization error occurs + */ + private void removeReadPolicies(DSpaceObject dso, String type) { + try { + resourcePolicyService.removePolicies(context, dso, type, Constants.READ); + } catch (SQLException | AuthorizeException e) { + throw new BulkAccessControlException(e); + } + } + + /** + * create the new resource policies of bitstream. 
+ * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust bitstream's default policies. + * and also update the resource policies of its derivative bitstreams. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + accessControl.getBitstream() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition, + uploadAccessConditions.get(accessCondition.getName()))); + + itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); + mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream); + } + + /** + * create the resource policy from the information + * comes from the access condition. 
+ * + * @param obj the dspace object + * @param accessCondition the access condition + * @param accessConditionOption the access condition option + * @throws BulkAccessControlException if an exception occurs + */ + private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition, + AccessConditionOption accessConditionOption) { + + String name = accessCondition.getName(); + String description = accessCondition.getDescription(); + Date startDate = accessCondition.getStartDate(); + Date endDate = accessCondition.getEndDate(); + + try { + accessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate); + } catch (Exception e) { + throw new BulkAccessControlException(e); + } + } + + /** + * Set the eperson in the context + * + * @param context the context + * @throws SQLException if database error + */ + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); + } + + context.setCurrentUser(myEPerson); + } + + private void logInfo(List accessConditions, String mode, DSpaceObject dso) { + String type = dso.getClass().getSimpleName(); + + if (REPLACE_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logInfo("Cleaning " + type + " {" + dso.getID() + "} policies"); + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + return; + } + + StringBuilder message = new StringBuilder(); + message.append(mode.equals(ADD_MODE) ? "Adding " : "Replacing ") + .append(type) + .append(" {") + .append(dso.getID()) + .append("} policy") + .append(mode.equals(ADD_MODE) ? 
" with " : " to ") + .append("access conditions:"); + + AppendAccessConditionsInfo(message, accessConditions); + + handler.logInfo(message.toString()); + + if (REPLACE_MODE.equals(mode) && isAppendModeEnabled()) { + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + } + } + + private void AppendAccessConditionsInfo(StringBuilder message, List accessConditions) { + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + message.append("{"); + + for (int i = 0; i < accessConditions.size(); i++) { + message.append(accessConditions.get(i).getName()); + + Optional.ofNullable(accessConditions.get(i).getStartDate()) + .ifPresent(date -> message.append(", start_date=" + dateFormat.format(date))); + + Optional.ofNullable(accessConditions.get(i).getEndDate()) + .ifPresent(date -> message.append(", end_date=" + dateFormat.format(date))); + + if (i != accessConditions.size() - 1) { + message.append(", "); + } + } + + message.append("}"); + } + + private boolean isAppendModeEnabled() { + return configurationService.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode"); + } + + protected boolean isAuthorized(Context context) { + return true; + } + + @Override + @SuppressWarnings("unchecked") + public BulkAccessControlScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java new file mode 100644 index 000000000000..4e8cfe480eeb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source 
+ * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.sql.SQLException; +import java.util.Arrays; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.scripts.DSpaceCommandLineParameter; + +/** + * Extension of {@link BulkAccessControl} for CLI. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCli extends BulkAccessControl { + + @Override + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson; + eperson = commandLine.getOptionValue('e'); + + if (eperson == null) { + handler.logError("An eperson to do the the Bulk Access Control must be specified " + + "(run with -h flag for details)"); + throw new UnsupportedOperationException("An eperson to do the Bulk Access Control must be specified"); + } + + if (StringUtils.contains(eperson, '@')) { + myEPerson = epersonService.findByEmail(context, eperson); + } else { + myEPerson = epersonService.find(context, UUID.fromString(eperson)); + } + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)"); + throw new UnsupportedOperationException("EPerson cannot be found: " + eperson); + } + + context.setCurrentUser(myEPerson); + } + + @Override + protected boolean isAuthorized(Context context) { + + if (context.getCurrentUser() == null) { + return false; + } + + return getScriptConfiguration().isAllowedToExecute(context, + Arrays.stream(commandLine.getOptions()) + .map(option -> + new DSpaceCommandLineParameter("-" + option.getOpt(), option.getValue())) + .collect(Collectors.toList())); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java 
b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java new file mode 100644 index 000000000000..951c93db3030 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.io.InputStream; + +import org.apache.commons.cli.Options; + +/** + * Extension of {@link BulkAccessControlScriptConfiguration} for CLI. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCliScriptConfiguration + extends BulkAccessControlScriptConfiguration { + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("e", "eperson", true, "email of EPerson used to perform actions"); + options.getOption("e").setRequired(true); + + options.addOption("h", "help", false, "help"); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java new file mode 100644 index 000000000000..5196247f94cb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java @@ -0,0 +1,110 @@ +/** + * The contents of this file are subject to the license 
and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.io.InputStream; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.cli.Options; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.utils.DSpace; + +/** + * Script configuration for {@link BulkAccessControl}. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + * @param the {@link BulkAccessControl} type + */ +public class BulkAccessControlScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + + try { + if (Objects.isNull(commandLineParameters)) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else { + List dspaceObjectIDs = + commandLineParameters.stream() + .filter(parameter -> "-u".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .collect(Collectors.toList()); + + DSpaceObjectUtils dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + for (String dspaceObjectID : dspaceObjectIDs) { + + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(dspaceObjectID)); + + if (Objects.isNull(dso)) { + throw new IllegalArgumentException(); + } + + if (!authorizeService.isAdmin(context, dso)) { + return 
false; + } + } + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + + return true; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this + * BulkImportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java new file mode 100644 index 000000000000..092611eb0654 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.exception; + +/** + * Exception for errors that occurs during the bulk access control + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlException extends RuntimeException { + + private static final long 
serialVersionUID = -74730626862418515L; + + /** + * Constructor with error message and cause. + * + * @param message the error message + * @param cause the error cause + */ + public BulkAccessControlException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructor with error message. + * + * @param message the error message + */ + public BulkAccessControlException(String message) { + super(message); + } + + /** + * Constructor with error cause. + * + * @param cause the error cause + */ + public BulkAccessControlException(Throwable cause) { + super(cause); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java new file mode 100644 index 000000000000..6cf95e0e2179 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.Date; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; +import org.dspace.util.MultiFormatDateDeserializer; + +/** + * Class that model the values of an Access Condition as expressed in the {@link BulkAccessControl} input file + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessCondition { + + private String name; + + private String description; + + @JsonDeserialize(using = MultiFormatDateDeserializer.class) + private Date startDate; + + @JsonDeserialize(using = MultiFormatDateDeserializer.class) + private Date endDate; + + public AccessCondition() { + } + + public AccessCondition(String name, String description, 
Date startDate, Date endDate) { + this.name = name; + this.description = description; + this.startDate = startDate; + this.endDate = endDate; + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + public Date getStartDate() { + return startDate; + } + + public Date getEndDate() { + return endDate; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java new file mode 100644 index 000000000000..2176e24d7f9d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the value of bitstream node + * from json file of the {@link BulkAccessControl} + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionBitstream { + + private String mode; + + private Constraint constraints; + + private List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public Constraint getConstraints() { + return constraints; + } + + public void setConstraints(Constraint constraints) { + this.constraints = constraints; + } + + public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } + return accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } + + public 
class Constraint { + + private List uuid; + + public List getUuid() { + return uuid; + } + + public void setUuid(List uuid) { + this.uuid = uuid; + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java new file mode 100644 index 000000000000..c482dfc34d65 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the value of item node + * from json file of the {@link BulkAccessControl} + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionItem { + + String mode; + + List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } + return accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java new file mode 100644 index 000000000000..a2ebbe5a12d4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java @@ -0,0 +1,50 @@ +/** + * The contents 
of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.List; + +import org.dspace.submit.model.AccessConditionOption; + +/** + * A collection of conditions to be met when bulk access condition. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionConfiguration { + + private String name; + private List itemAccessConditionOptions; + private List bitstreamAccessConditionOptions; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List getItemAccessConditionOptions() { + return itemAccessConditionOptions; + } + + public void setItemAccessConditionOptions( + List itemAccessConditionOptions) { + this.itemAccessConditionOptions = itemAccessConditionOptions; + } + + public List getBitstreamAccessConditionOptions() { + return bitstreamAccessConditionOptions; + } + + public void setBitstreamAccessConditionOptions( + List bitstreamAccessConditionOptions) { + this.bitstreamAccessConditionOptions = bitstreamAccessConditionOptions; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java new file mode 100644 index 000000000000..0f8852a71f7d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java @@ -0,0 +1,72 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + 
+/** + * Class that model the content of the JSON file used as input for the {@link BulkAccessControl} + * + *
+ * {
+ * item: {
+ * mode: "replace",
+ * accessConditions: [
+ * {
+ * "name": "openaccess"
+ * }
+ * ]
+ * },
+ * bitstream: {
+ * constraints: {
+ * uuid: [bit-uuid1, bit-uuid2, ..., bit-uuidN],
+ * },
+ * mode: "add",
+ * accessConditions: [
+ * {
+ * "name": "embargo",
+ * "startDate": "2024-06-24T23:59:59.999+0000"
+ * }
+ * ]
+ * }
+ * } + *
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessControlInput { + + AccessConditionItem item; + + AccessConditionBitstream bitstream; + + public BulkAccessControlInput() { + } + + public BulkAccessControlInput(AccessConditionItem item, + AccessConditionBitstream bitstream) { + this.item = item; + this.bitstream = bitstream; + } + + public AccessConditionItem getItem() { + return item; + } + + public void setItem(AccessConditionItem item) { + this.item = item; + } + + public AccessConditionBitstream getBitstream() { + return bitstream; + } + + public void setBitstream(AccessConditionBitstream bitstream) { + this.bitstream = bitstream; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java new file mode 100644 index 000000000000..321b6d928e92 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.service; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.collections4.CollectionUtils; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Simple bean to manage different Bulk Access Condition configurations + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionConfigurationService { + + @Autowired + private List bulkAccessConditionConfigurations; + + public List getBulkAccessConditionConfigurations() { 
+ if (CollectionUtils.isEmpty(bulkAccessConditionConfigurations)) { + return new ArrayList<>(); + } + return bulkAccessConditionConfigurations; + } + + public BulkAccessConditionConfiguration getBulkAccessConditionConfiguration(String name) { + return getBulkAccessConditionConfigurations().stream() + .filter(x -> name.equals(x.getName())) + .findFirst() + .orElse(null); + } + + public void setBulkAccessConditionConfigurations( + List bulkAccessConditionConfigurations) { + this.bulkAccessConditionConfigurations = bulkAccessConditionConfigurations; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java index 491039cff835..ba348be0fe4b 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java @@ -13,7 +13,7 @@ import static org.apache.commons.lang3.StringUtils.isAllBlank; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; -import static org.apache.commons.lang3.StringUtils.split; +import static org.apache.commons.lang3.StringUtils.splitByWholeSeparator; import static org.apache.commons.lang3.StringUtils.startsWith; import static org.apache.commons.lang3.exception.ExceptionUtils.getRootCauseMessage; import static org.apache.commons.lang3.math.NumberUtils.isCreatable; @@ -258,7 +258,7 @@ public void setup() throws ParseException { collectionId = commandLine.getOptionValue('c'); filename = commandLine.getOptionValue('f'); - if (commandLine.hasOption('e')) { + if (commandLine.hasOption("er")) { abortOnError = true; } } @@ -266,11 +266,9 @@ public void setup() throws ParseException { @Override public void internalRun() throws Exception { context = new Context(Context.Mode.BATCH_EDIT); - assignCurrentUserInContext(); + assignCurrentUserInContext(context); assignSpecialGroupsInContext(); - 
context.turnOffAuthorisationSystem(); - InputStream inputStream = handler.getFileStream(context, filename) .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be " + "found for filename: " + filename)); @@ -285,6 +283,7 @@ public void internalRun() throws Exception { } try { + context.turnOffAuthorisationSystem(); performImport(inputStream); context.complete(); context.restoreAuthSystemState(); @@ -609,7 +608,8 @@ private boolean areMetadataValuesValid(Row row, boolean manyMetadataValuesAllowe for (int index = firstMetadataIndex; index < row.getLastCellNum(); index++) { String cellValue = WorkbookUtils.getCellValue(row, index); - String[] values = isNotBlank(cellValue) ? split(cellValue, METADATA_SEPARATOR) : new String[] { "" }; + String[] values = isNotBlank(cellValue) ? splitByWholeSeparator(cellValue, METADATA_SEPARATOR) + : new String[] { "" }; if (values.length > 1 && !manyMetadataValuesAllowed) { handleValidationErrorOnRow(row, "Multiple metadata value on the same cell not allowed " + "in the metadata group sheets: " + cellValue); @@ -743,7 +743,7 @@ private List validateAccessConditions(Row row) { Map accessConditionOptions = getUploadAccessConditions(); return Arrays.stream(getAccessConditionValues(row)) - .map(accessCondition -> split(accessCondition, ACCESS_CONDITION_ATTRIBUTES_SEPARATOR)[0]) + .map(accessCondition -> splitByWholeSeparator(accessCondition, ACCESS_CONDITION_ATTRIBUTES_SEPARATOR)[0]) .filter(accessConditionName -> !accessConditionOptions.containsKey(accessConditionName)) .collect(Collectors.toList()); } @@ -788,14 +788,14 @@ private List buildAccessConditions(Row row, String[] accessCond } return Arrays.stream(accessConditions) - .map(accessCondition -> split(accessCondition, ACCESS_CONDITION_ATTRIBUTES_SEPARATOR)) + .map(accessCondition -> splitByWholeSeparator(accessCondition, ACCESS_CONDITION_ATTRIBUTES_SEPARATOR)) .map(accessConditionAttributes -> buildAccessCondition(accessConditionAttributes)) 
.collect(Collectors.toList()); } private String[] getAccessConditionValues(Row row) { String accessConditionCellValue = getCellValue(row, ACCESS_CONDITION_HEADER); - return split(accessConditionCellValue, METADATA_SEPARATOR); + return splitByWholeSeparator(accessConditionCellValue, METADATA_SEPARATOR); } private AccessCondition buildAccessCondition(String[] accessCondition) { @@ -1306,12 +1306,13 @@ private void removeSingleMetadata(DSpaceObject dso, MetadataField field, String } private String getMetadataField(String field) { - return field.contains(LANGUAGE_SEPARATOR_PREFIX) ? split(field, LANGUAGE_SEPARATOR_PREFIX)[0] : field; + return field.contains(LANGUAGE_SEPARATOR_PREFIX) ? splitByWholeSeparator(field, LANGUAGE_SEPARATOR_PREFIX)[0] + : field; } private String getMetadataLanguage(String field) { if (field.contains(LANGUAGE_SEPARATOR_PREFIX)) { - return split(field, LANGUAGE_SEPARATOR_PREFIX)[1].replace(LANGUAGE_SEPARATOR_SUFFIX, ""); + return splitByWholeSeparator(field, LANGUAGE_SEPARATOR_PREFIX)[1].replace(LANGUAGE_SEPARATOR_SUFFIX, ""); } return null; } @@ -1364,7 +1365,8 @@ private MultiValuedMap getMetadataFromRow(Row row, Map< if (index >= firstMetadataIndex) { String cellValue = WorkbookUtils.getCellValue(row, index); - String[] values = isNotBlank(cellValue) ? split(cellValue, METADATA_SEPARATOR) : new String[] { "" }; + String[] values = isNotBlank(cellValue) ? 
splitByWholeSeparator(cellValue, METADATA_SEPARATOR) + : new String[] { "" }; List metadataValues = Arrays.stream(values) .map(value -> buildMetadataValueVO(row, value, isMetadataGroupsSheet)) @@ -1601,7 +1603,7 @@ private void rollback() { } } - private void assignCurrentUserInContext() throws SQLException { + protected void assignCurrentUserInContext(Context context) throws SQLException, ParseException { UUID uuid = getEpersonIdentifier(); if (uuid != null) { EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java index 36da59c7a252..c1399c61413b 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java @@ -7,6 +7,13 @@ */ package org.dspace.app.bulkedit; +import java.sql.SQLException; + +import org.apache.commons.cli.ParseException; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; + /** * Extension of {@link BulkImport} for CLI. 
* @@ -15,4 +22,24 @@ */ public class BulkImportCli extends BulkImport { + @Override + protected void assignCurrentUserInContext(Context context) throws ParseException { + if (commandLine.hasOption('e')) { + String ePersonEmail = commandLine.getOptionValue('e'); + try { + EPerson ePerson = + EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, ePersonEmail); + if (ePerson == null) { + super.handler.logError("EPerson not found: " + ePersonEmail); + throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail); + } + context.setCurrentUser(ePerson); + } catch (SQLException e) { + throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail); + } + } else { + throw new ParseException("Required parameter -e missing!"); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java index f79c03e041e2..5e34f6a58464 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java @@ -7,6 +7,8 @@ */ package org.dspace.app.bulkedit; +import org.apache.commons.cli.Options; + /** * Extension of {@link BulkImportScriptConfiguration} for CLI. 
* @@ -15,5 +17,13 @@ */ public class BulkImportCliScriptConfiguration extends BulkImportScriptConfiguration { + @Override + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("e", "email", true, "email address of user"); + options.getOption("e").setRequired(true); + super.options = options; + return options; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java index e2fd7bacd0e1..3530687bf36f 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java @@ -52,9 +52,9 @@ public Options getOptions() { options.getOption("f").setType(InputStream.class); options.getOption("f").setRequired(true); - options.addOption("e", "concludeOnError", false, "conclude the import at the first error"); - options.getOption("e").setType(boolean.class); - options.getOption("e").setRequired(false); + options.addOption("er", "concludeOnError", false, "conclude the import at the first error"); + options.getOption("er").setType(boolean.class); + options.getOption("er").setRequired(false); super.options = options; } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index 9ccd53944a24..fb228e7041b8 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import 
org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script. */ public class MetadataDeletionScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 31556afc8d3d..aa76c09c0a5b 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataExport} script */ public class MetadataExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { 
- throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java index 5a2e62332492..5a37d5676598 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java @@ -573,6 +573,10 @@ public List runImport(Context c, boolean change, wfItem = workflowService.startWithoutNotify(c, wsItem); } } else { + // Add provenance info + String provenance = installItemService.getSubmittedByProvenanceMessage(c, wsItem.getItem()); + itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provenance); // Install the item installItemService.installItem(c, wsItem); } @@ -1369,8 +1373,8 @@ private int displayChanges(List changes, boolean changed) { * is the field is defined as authority controlled */ private boolean isAuthorityControlledField(String md) { - String mdf = StringUtils.substringAfter(md, ":"); - mdf = StringUtils.substringBefore(mdf, "["); + String mdf = md.contains(":") ? StringUtils.substringAfter(md, ":") : md; + mdf = mdf.contains("[") ? 
StringUtils.substringBefore(mdf, "[") : mdf; return metadataAuthorityService.isAuthorityAllowed(mdf.replaceAll("\\.", "_"), Constants.ITEM, null); } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java index 65994040badc..ce2f7fb68af1 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java @@ -8,22 +8,15 @@ package org.dspace.app.bulkedit; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataImport} script */ public class MetadataImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -40,15 +33,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkimport/model/BulkImportSheet.java b/dspace-api/src/main/java/org/dspace/app/bulkimport/model/BulkImportSheet.java index 14fbb60524fb..53c5f9b99166 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkimport/model/BulkImportSheet.java +++ 
b/dspace-api/src/main/java/org/dspace/app/bulkimport/model/BulkImportSheet.java @@ -15,6 +15,7 @@ import java.util.List; import java.util.Map; +import org.apache.commons.lang.StringUtils; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; @@ -107,7 +108,12 @@ public void appendValueOnLastRow(String header, String value, String separator) throw new IllegalArgumentException("Unknown header '" + header + "'"); } String cellContent = WorkbookUtils.getCellValue(lastRow, column); - createCell(lastRow, column, isEmpty(cellContent) ? value : cellContent + separator + value); + createCell(lastRow, column, + getValueLimitedByLength(isEmpty(cellContent) ? value : cellContent + separator + value)); + } + + private String getValueLimitedByLength(String value) { + return StringUtils.length(value) > 32726 ? value.substring(0, 32725) + "…" : value; } } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java index 6f719ff85f2c..e12f1100be10 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java @@ -43,8 +43,8 @@ import org.dspace.app.deduplication.service.DedupService; import org.dspace.app.deduplication.service.SearchDeduplication; import org.dspace.app.deduplication.service.SolrDedupServiceIndexPlugin; -import org.dspace.app.deduplication.utils.DedupUtils; import org.dspace.app.deduplication.utils.DuplicateItemInfo; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.deduplication.utils.Signature; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; @@ -174,7 +174,7 @@ public class SolrDedupServiceImpl implements DedupService { protected VersioningService 
versioningService; @Autowired(required = true) - protected DedupUtils dedupUtils; + protected IDedupUtils dedupUtils; /*** * Deduplication status @@ -314,6 +314,22 @@ private void fillSignature(Context ctx, DSpaceObject iu, Map plainSignatures = algo.getPlainSignature(iu, ctx); + for (String signature : plainSignatures) { + if (StringUtils.isNotEmpty(signature)) { + String key = "plain_" + algo.getSignatureType() + "_signature"; + if (tmpMapFilter.containsKey(key)) { + List obj = tmpMapFilter.get(key); + obj.add(signature); + tmpMapFilter.put(key, obj); + } else { + List obj = new ArrayList(); + obj.add(signature); + tmpMapFilter.put(key, obj); + } + } + } } } @@ -734,8 +750,8 @@ private void setDuplicateDecision(Context context, Item item, UUID duplicatedIte private List findDuplicationWithDecisions(Context context, Item item) { try { return dedupUtils.getAdminDuplicateByIdAndType(context, item.getID(), item.getType()).stream() - .filter(duplication -> isNotEmpty(duplication.getDecisionTypes())) - .collect(Collectors.toList()); + .filter(duplication -> isNotEmpty(duplication.getDecisionTypes())) + .collect(Collectors.toList()); } catch (SQLException | SearchServiceException e) { throw new RuntimeException(e); } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java index 97bf4a334652..b4c29d8780c9 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java @@ -47,12 +47,15 @@ import org.dspace.services.ConfigurationService; import org.dspace.util.ItemUtils; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; /** * Utility class used to search for duplicates inside the dedup solr core. 
* */ -public class DedupUtils { + +@Service +public class DedupUtils implements IDedupUtils { private static Logger log = LogManager.getLogger(DedupUtils.class); @@ -64,11 +67,14 @@ public class DedupUtils { @Autowired(required = true) protected ConfigurationService configurationService; - public DuplicateInfoList findSignatureWithDuplicate(Context context, String signatureType, int resourceType, - int limit, int offset, int rule) throws SearchServiceException, SQLException { + @Override + public Collection findSignatureWithDuplicate(Context context, String signatureType, int resourceType, + int limit, int offset, int rule) + throws SearchServiceException, SQLException { return findPotentialMatch(context, signatureType, resourceType, limit, offset, rule); } + @Override public Map countSignaturesWithDuplicates(String query, int resourceTypeId) throws SearchServiceException { Map results = new HashMap(); @@ -113,6 +119,7 @@ public Map countSignaturesWithDuplicates(String query, int reso return results; } + @Override public Map countSuggestedDuplicate(String query, int resourceTypeId) throws SearchServiceException { Map results = new HashMap(); @@ -241,8 +248,9 @@ private boolean hasStoredDecision(UUID firstItemID, UUID secondItemID, Duplicate return !response.getResults().isEmpty(); } + @Override public boolean matchExist(Context context, UUID itemID, UUID targetItemID, Integer resourceType, - String signatureType, Boolean isInWorkflow) throws SQLException, SearchServiceException { + String signatureType, Boolean isInWorkflow) throws SQLException, SearchServiceException { boolean exist = false; List potentialDuplicates = findDuplicate(context, itemID, resourceType, null, isInWorkflow); for (DuplicateItemInfo match : potentialDuplicates) { @@ -256,6 +264,7 @@ public boolean matchExist(Context context, UUID itemID, UUID targetItemID, Integ } + @Override public boolean rejectAdminDups(Context context, UUID firstId, UUID secondId, Integer type) throws SQLException, 
AuthorizeException { if (firstId == secondId) { @@ -309,6 +318,7 @@ public boolean rejectAdminDups(Context context, UUID firstId, UUID secondId, Int * @throws AuthorizeException * @throws SearchServiceException */ + @Override public boolean rejectAdminDups(Context context, UUID itemID, String signatureType, int resourceType) throws SQLException, AuthorizeException, SearchServiceException { @@ -336,6 +346,7 @@ public boolean rejectAdminDups(Context context, UUID itemID, String signatureTyp } + @Override public void rejectAdminDups(Context context, List items, String signatureID) throws SQLException, AuthorizeException, SearchServiceException { for (DSpaceObject item : items) { @@ -343,8 +354,9 @@ public void rejectAdminDups(Context context, List items, String si } } + @Override public void verify(Context context, int dedupId, UUID firstId, UUID secondId, int type, boolean toFix, String note, - boolean check) throws SQLException, AuthorizeException { + boolean check) throws SQLException, AuthorizeException { UUID[] sortedIds = new UUID[] { firstId, secondId }; Arrays.sort(sortedIds); firstId = sortedIds[0]; @@ -417,8 +429,9 @@ private Deduplication retrieveDuplicationRow(Context context, UUID firstId, UUID return row; } + @Override public void setDuplicateDecision(Context context, UUID firstId, UUID secondId, Integer type, - DuplicateDecisionObjectRest decisionObject) + DuplicateDecisionObjectRest decisionObject) throws AuthorizeException, SQLException, SearchServiceException { if (hasAuthorization(context, firstId, secondId)) { @@ -478,6 +491,7 @@ public void setDuplicateDecision(Context context, UUID firstId, UUID secondId, I } } + @Override public boolean validateDecision(DuplicateDecisionObjectRest decisionObject) { boolean valid = false; @@ -500,8 +514,9 @@ public boolean validateDecision(DuplicateDecisionObjectRest decisionObject) { return valid; } + @Override public boolean rejectDups(Context context, UUID firstId, UUID secondId, Integer type, boolean notDupl, 
String note, - boolean check) throws SQLException { + boolean check) throws SQLException { UUID[] sortedIds = new UUID[] { firstId, secondId }; Arrays.sort(sortedIds); Deduplication row = null; @@ -547,11 +562,9 @@ public boolean rejectDups(Context context, UUID firstId, UUID secondId, Integer return false; } - private DuplicateInfoList findPotentialMatch(Context context, String signatureType, int resourceType, int start, + private List findPotentialMatch(Context context, String signatureType, int resourceType, int start, int rows, int rule) throws SearchServiceException, SQLException { - DuplicateInfoList dil = new DuplicateInfoList(); - if (StringUtils.isNotEmpty(signatureType)) { if (!StringUtils.contains(signatureType, "_signature")) { signatureType += "_signature"; @@ -594,7 +607,7 @@ private DuplicateInfoList findPotentialMatch(Context context, String signatureTy FacetField facetField = responseFacet.getFacetField(signatureType); - List result = new ArrayList(); + List result = new ArrayList<>(); int index = 0; for (Count facetHit : facetField.getValues()) { @@ -653,10 +666,7 @@ private DuplicateInfoList findPotentialMatch(Context context, String signatureTy } index++; } - - dil.setDsi(result); - dil.setSize(facetField.getValues().size()); - return dil; + return result; } private DuplicateSignatureInfo findPotentialMatchByID(Context context, String signatureType, int resourceType, @@ -699,38 +709,45 @@ private DuplicateSignatureInfo findPotentialMatchByID(Context context, String si return dsi; } + @Override public DedupService getDedupService() { return dedupService; } + @Override public void setDedupService(DedupService dedupService) { this.dedupService = dedupService; } + @Override public void commit() { dedupService.commit(); } + @Override public List getDuplicateByIDandType(Context context, UUID itemID, int typeID, - boolean isInWorkflow) throws SQLException, SearchServiceException { + boolean isInWorkflow) + throws SQLException, SearchServiceException { 
return getDuplicateByIdAndTypeAndSignatureType(context, itemID, typeID, null, isInWorkflow); } + @Override public List getDuplicateByIdAndTypeAndSignatureType(Context context, UUID itemID, int typeID, - String signatureType, boolean isInWorkflow) throws SQLException, SearchServiceException { + String signatureType, boolean isInWorkflow) + throws SQLException, SearchServiceException { return findDuplicate(context, itemID, typeID, signatureType, isInWorkflow); } + @Override public List getAdminDuplicateByIdAndType(Context context, UUID itemID, int typeID) throws SQLException, SearchServiceException { return findDuplicate(context, itemID, typeID, null, null); } - public DuplicateInfoList findSuggestedDuplicate(Context context, int resourceType, int start, int rows) + @Override + public List findSuggestedDuplicate(Context context, int resourceType, int start, int rows) throws SearchServiceException, SQLException { - DuplicateInfoList dil = new DuplicateInfoList(); - SolrQuery solrQueryInternal = new SolrQuery(); solrQueryInternal.setQuery(SolrDedupServiceImpl.SUBQUERY_NOT_IN_REJECTED); @@ -774,8 +791,6 @@ public DuplicateInfoList findSuggestedDuplicate(Context context, int resourceTyp index++; } - dil.setDsi(result); - dil.setSize(solrDocumentList.getNumFound()); - return dil; + return result; } } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DuplicateInfoList.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DuplicateInfoList.java deleted file mode 100644 index 3935944ffa77..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DuplicateInfoList.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.deduplication.utils; - -import java.util.List; - -public class 
DuplicateInfoList { - - private long size; - - private List dsi; - - public long getSize() { - return size; - } - - public void setSize(long size) { - this.size = size; - } - - public List getDsi() { - return dsi; - } - - public void setDsi(List dsi) { - this.dsi = dsi; - } - -} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/IDedupUtils.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/IDedupUtils.java new file mode 100644 index 000000000000..774735eaac3a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/IDedupUtils.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.deduplication.utils; + +import java.sql.SQLException; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import org.dspace.app.deduplication.model.DuplicateDecisionObjectRest; +import org.dspace.app.deduplication.service.DedupService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public interface IDedupUtils { + Collection findSignatureWithDuplicate(Context context, String signatureType, int resourceType, + int limit, int offset, int rule) throws SearchServiceException, + SQLException; + + Map countSignaturesWithDuplicates(String query, int resourceTypeId) + throws SearchServiceException; + + Map countSuggestedDuplicate(String query, int resourceTypeId) + throws SearchServiceException; + + boolean matchExist(Context context, UUID itemID, UUID targetItemID, Integer resourceType, + String signatureType, Boolean 
isInWorkflow) throws SQLException, SearchServiceException; + + boolean rejectAdminDups(Context context, UUID firstId, UUID secondId, Integer type) + throws SQLException, AuthorizeException; + + boolean rejectAdminDups(Context context, UUID itemID, String signatureType, int resourceType) + throws SQLException, AuthorizeException, SearchServiceException; + + void rejectAdminDups(Context context, List items, String signatureID) + throws SQLException, AuthorizeException, SearchServiceException; + + void verify(Context context, int dedupId, UUID firstId, UUID secondId, int type, boolean toFix, String note, + boolean check) throws SQLException, AuthorizeException; + + void setDuplicateDecision(Context context, UUID firstId, UUID secondId, Integer type, + DuplicateDecisionObjectRest decisionObject) + throws AuthorizeException, SQLException, SearchServiceException; + + boolean validateDecision(DuplicateDecisionObjectRest decisionObject); + + boolean rejectDups(Context context, UUID firstId, UUID secondId, Integer type, boolean notDupl, String note, + boolean check) throws SQLException; + + DedupService getDedupService(); + + void setDedupService(DedupService dedupService); + + void commit(); + + List getDuplicateByIDandType(Context context, UUID itemID, int typeID, + boolean isInWorkflow) throws SQLException, SearchServiceException; + + List getDuplicateByIdAndTypeAndSignatureType(Context context, UUID itemID, int typeID, + String signatureType, boolean isInWorkflow) + throws SQLException, SearchServiceException; + + List getAdminDuplicateByIdAndType(Context context, UUID itemID, int typeID) + throws SQLException, SearchServiceException; + + Collection findSuggestedDuplicate(Context context, int resourceType, int start, int rows) + throws SearchServiceException, SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java index 
8b047584bc1b..aacc7aa9ae0e 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java @@ -6,6 +6,7 @@ * http://www.dspace.org/license/ */ package org.dspace.app.deduplication.utils; + import java.io.UnsupportedEncodingException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -13,6 +14,7 @@ import java.util.List; import java.util.Locale; import java.util.Objects; +import java.util.stream.Collectors; import com.ibm.icu.text.CharsetDetector; import com.ibm.icu.text.CharsetMatch; @@ -22,12 +24,15 @@ import org.apache.logging.log4j.Logger; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; import org.dspace.content.MetadataValue; import org.dspace.content.WorkspaceItem; +import org.dspace.content.dto.MetadataValueDTO; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Context; +import org.dspace.external.model.ExternalDataObject; import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowItemService; import org.dspace.workflow.factory.WorkflowServiceFactory; @@ -95,6 +100,37 @@ public List getSignature(DSpaceObject item, Context context) { } } + public List getPlainSignature(DSpaceObject item, Context context) { + List result = new ArrayList(); + try { + MessageDigest digester = MessageDigest.getInstance("MD5"); + List values = getMultiValue(item, metadata); + if (values != null) { + for (String value : values) { + if (StringUtils.isNotEmpty(value)) { + String valueNorm = normalize(item, value); + digester.update(valueNorm.getBytes("UTF-8")); + byte[] signature = digester.digest(); + char[] arr = new char[signature.length << 1]; + for (int i = 0; i < signature.length; i++) { + int b = 
signature[i]; + int idx = i << 1; + arr[idx] = HEX_DIGITS[(b >> 4) & 0xf]; + arr[idx + 1] = HEX_DIGITS[b & 0xf]; + } + String sigString = new String(arr); + result.add(sigString); + } + } + } + return result; + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e.getMessage(), e); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + protected String normalize(DSpaceObject item, Context context, String value) { if (value != null) { String temp = StringUtils.EMPTY; @@ -210,6 +246,70 @@ protected List getMultiValue(DSpaceObject item, String metadata) { return retValue; } + public List getSignature(ExternalDataObject object) { + List result = new ArrayList(); + try { + MessageDigest digester = MessageDigest.getInstance("MD5"); + List values = getMultiValue(object, metadata); + if (values != null) { + for (String value : values) { + if (StringUtils.isNotEmpty(value)) { + String valueNorm = normalize(object, value); + digester.update(valueNorm.getBytes("UTF-8")); + byte[] signature = digester.digest(); + char[] arr = new char[signature.length << 1]; + for (int i = 0; i < signature.length; i++) { + int b = signature[i]; + int idx = i << 1; + arr[idx] = HEX_DIGITS[(b >> 4) & 0xf]; + arr[idx + 1] = HEX_DIGITS[b & 0xf]; + } + String sigString = new String(arr); + result.add(sigString); + } + } + } + return result; + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e.getMessage(), e); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + protected List getMultiValue(ExternalDataObject object, String metadata) { + return object.getMetadata() + .stream() + .filter(metadataValueDTO -> + new MetadataFieldName(metadataValueDTO.getSchema(), metadataValueDTO.getElement(), + metadataValueDTO.getQualifier()).toString().equals(metadata)) + .map(MetadataValueDTO::getValue) + .collect(Collectors.toList()); + } + + protected String 
normalize(ExternalDataObject object, String value) { + String result = value; + if (StringUtils.isEmpty(value)) { + if (StringUtils.isNotEmpty(prefix)) { + result = prefix + object.getId(); + } else { + result = "entity:" + object.getId(); + } + } else { + for (String prefix : ignorePrefix) { + if (value.startsWith(prefix)) { + result = value.substring(prefix.length()); + break; + } + } + if (StringUtils.isNotEmpty(prefix)) { + result = prefix + result; + } + } + + return result; + } + public String getMetadata() { return metadata; } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java index 2bf662b39d75..81a0fb228911 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java @@ -11,10 +11,15 @@ import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.external.model.ExternalDataObject; public interface Signature { public List getSignature(/* BrowsableDSpaceObject */DSpaceObject item, Context context); + public List getPlainSignature(DSpaceObject item, Context context); + + public List getSignature(ExternalDataObject object); + public int getResourceTypeID(); public String getSignatureType(); diff --git a/dspace-api/src/main/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumer.java b/dspace-api/src/main/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumer.java new file mode 100644 index 000000000000..b5c51e93e766 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumer.java @@ -0,0 +1,274 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.app.filetype.consumer; + +import static org.dspace.util.FunctionalUtils.throwingConsumerWrapper; +import static org.dspace.util.FunctionalUtils.throwingMapperWrapper; + +import java.sql.SQLException; +import java.text.MessageFormat; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.codec.binary.StringUtils; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.core.exception.SQLRuntimeException; +import org.dspace.event.Consumer; +import org.dspace.event.Event; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class FileTypeMetadataEnhancerConsumer implements Consumer { + + private static final Logger logger = LoggerFactory.getLogger(FileTypeMetadataEnhancerConsumer.class); + + protected static final MetadataFieldName entityTypeMetadata = new MetadataFieldName("dc", "type"); + protected static final MetadataFieldName fileTypeMetadata = new MetadataFieldName("dspace", "file", "type"); + private static final List itemMetadatas = List.of(fileTypeMetadata); + private static final List bitstreamMetadatas = List.of(entityTypeMetadata); + private static final Map bitstreamToItemMetadatasMap = Map.of( + entityTypeMetadata.toString(), fileTypeMetadata + ); + + private BitstreamService bitstreamService; + private 
ItemService itemService; + + private Set bitstreamAlreadyProcessed = new HashSet<>(); + private Set itemsToProcess = new HashSet<>(); + + @Override + public void initialize() throws Exception { + this.bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + this.itemService = ContentServiceFactory.getInstance().getItemService(); + } + + @Override + public void consume(Context ctx, Event event) throws Exception { + if (Constants.BITSTREAM == event.getSubjectType()) { + this.handleBitStreamConsumer( + ctx, + Optional.ofNullable((Bitstream) event.getObject(ctx)) + .orElse(this.loadBitstream(ctx, event)), + event + ); + } else if (Constants.ITEM == event.getSubjectType() && Event.CREATE == event.getEventType()) { + this.handleItemConsumer( + ctx, + Optional.ofNullable((Item) event.getObject(ctx)) + .orElse(this.loadItem(ctx, event)) + ); + } else { + logger.warn( + "Can't consume the DSPaceObject with id {}, only BITSTREAM and ITEMS'CREATION events are consumable!", + event.getSubjectID() + ); + } + } + + @Override + public void end(Context ctx) throws Exception { + bitstreamAlreadyProcessed.clear(); + this.itemsToProcess + .stream() + .forEach(item -> this.handleItemConsumer(ctx, item)); + itemsToProcess.clear(); + } + + @Override + public void finish(Context ctx) throws Exception {} + + private Bitstream loadBitstream(Context ctx, Event event) { + Bitstream found = null; + try { + found = this.bitstreamService.find(ctx, event.getSubjectID()); + } catch (SQLException e) { + logger.error("Error while retrieving the bitstream with ID: " + event.getSubjectID(), e); + throw new SQLRuntimeException("Error while retrieving the bitstream with ID: " + event.getSubjectID(), e); + } + return found; + } + + private Item loadItem(Context ctx, Event event) { + Item found = null; + try { + found = this.itemService.find(ctx, event.getSubjectID()); + } catch (SQLException e) { + logger.error("Error while retrieving the bitstream with ID: " + 
event.getSubjectID(), e); + throw new SQLRuntimeException("Error while retrieving the bitstream with ID: " + event.getSubjectID(), e); + } + return found; + } + + private void handleBitStreamConsumer(Context ctx, Bitstream bitstream, Event event) { + + if (bitstream == null || this.alreadyProcessed(bitstream)) { + return; + } + List bitstreamItems = List.of(); + try { + bitstreamItems = bitstream.getBundles() + .stream() + .filter(bundle -> "ORIGINAL".equals(bundle.getName())) + .map(Bundle::getItems) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + bitstreamAlreadyProcessed.add(bitstream); + bitstreamItems + .stream() + .forEach(item -> this.itemsToProcess.add(item)); + } + } + + private void handleItemConsumer(Context ctx, Item item) { + + if (item == null) { + return; + } + + try { + Item loadedItem = this.itemService.find(ctx, item.getID()); + Map> grouped = + Optional.ofNullable(loadedItem) + .map(i -> i.getBundles("ORIGINAL")) + .filter(bundles -> !bundles.isEmpty()) + .map(bundles -> bundles.get(0)) + .map(Bundle::getBitstreams) + .filter(bitstreams -> !bitstreams.isEmpty()) + .map(bitstreams -> getMetadatasForItem(ctx, bitstreams).collect(Collectors.toList())) + .map(metadatas -> groupByMetadataField(metadatas)) + .filter(metadatas -> !metadatas.isEmpty()) + .orElse(Map.of()); + + this.itemService.removeMetadataValues(ctx, loadedItem, getRemovableMetadatas(loadedItem)); + + grouped + .entrySet() + .stream() + .map(entry -> + Map.entry(bitstreamToItemMetadatasMap.get(entry.getKey().toString('.')), entry.getValue()) + ) + .filter(entry -> entry.getKey() != null) + .forEach( + throwingConsumerWrapper(entry -> + this.addMetadata( + ctx, + loadedItem, + entry.getKey(), + entry.getValue() + ) + ) + ); + + } catch (SQLException e) { + logger.error(MessageFormat.format("Error while processing item {}!", item.getID().toString()), e); + throw new SQLRuntimeException(e); + } + + 
} + + private void addMetadata(Context ctx, Item loadedItem, MetadataFieldName metadata, List value) + throws SQLException { + this.itemService.addMetadata( + ctx, + loadedItem, + metadata.schema, + metadata.element, + metadata.qualifier, + null, + value + ); + } + + private Stream getMetadatasForItem(Context ctx, List bitstreams) { + return bitstreams + .stream() + .map( + throwingMapperWrapper(bitstream -> + this.bitstreamService.find(ctx, bitstream.getID()), + null + ) + ) + .filter(Objects::nonNull) + .flatMap(bitstream -> filterBitstreamMetadatasForItem(bitstream)); + } + + private Stream filterBitstreamMetadatasForItem(Bitstream bitstream) { + return bitstream.getMetadata() + .stream() + .filter( + metadataFilter( + bitstreamMetadatas + ) + ); + } + + private Map> groupByMetadataField(List metadatas) { + return this.collectByGroupingMetadataFieldMappingValue(metadatas.stream()); + } + + private Map> collectByGroupingMetadataFieldMappingValue(Stream stream) { + return stream + .collect( + Collectors.groupingBy( + MetadataValue::getMetadataField, + Collectors.mapping(MetadataValue::getValue, Collectors.toList()) + ) + ); + } + + private boolean alreadyProcessed(Bitstream bitstream) { + return bitstreamAlreadyProcessed.contains(bitstream); + } + + private List getRemovableMetadatas(DSpaceObject dspaceObject) { + return dspaceObject + .getMetadata() + .stream() + .filter( + metadataFilter( + itemMetadatas + ) + ) + .collect(Collectors.toList()); + } + + private Predicate metadataFilter(List metadataFields) { + return metadata -> + metadataFields + .stream() + .filter(field -> + StringUtils.equals(field.schema, metadata.getSchema()) && + StringUtils.equals(field.element, metadata.getElement()) && + StringUtils.equals(field.qualifier, metadata.getQualifier()) + ) + .findFirst() + .isPresent(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java 
b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java index 982973e47c50..ff83c3ecb225 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -7,18 +7,11 @@ */ package org.dspace.app.harvest; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class HarvestScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; private Class dspaceRunnableClass; @@ -32,13 +25,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java index cf70120d27d3..b37df5f5ea59 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -7,14 +7,9 @@ */ package org.dspace.app.itemexport; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import 
org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemExport} script @@ -23,9 +18,6 @@ */ public class ItemExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index 6870b94eee1d..b32de11f7a7f 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -11,6 +11,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.net.URL; import java.nio.file.Files; import java.sql.SQLException; import java.util.ArrayList; @@ -22,6 +23,7 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; import org.dspace.app.itemimport.factory.ItemImportServiceFactory; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.authorize.AuthorizeException; @@ -74,10 +76,12 @@ public class ItemImport extends DSpaceRunnable { protected boolean isQuiet = false; protected boolean commandLineCollections = false; protected boolean zip = false; + protected boolean remoteUrl = false; protected String zipfilename = null; + protected boolean zipvalid = 
false; protected boolean help = false; protected File workDir = null; - private File workFile = null; + protected File workFile = null; protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); @@ -233,10 +237,21 @@ public void internalRun() throws Exception { handler.logInfo("***End of Test Run***"); } } finally { - // clean work dir if (zip) { - FileUtils.deleteDirectory(new File(sourcedir)); - FileUtils.deleteDirectory(workDir); + // if zip file was valid then clean sourcedir + if (zipvalid && sourcedir != null && new File(sourcedir).exists()) { + FileUtils.deleteDirectory(new File(sourcedir)); + } + + // clean workdir + if (workDir != null && workDir.exists()) { + FileUtils.deleteDirectory(workDir); + } + + // conditionally clean workFile if import was done in the UI or via a URL and it still exists + if (workFile != null && workFile.exists()) { + workFile.delete(); + } } Date endTime = new Date(); @@ -253,6 +268,17 @@ public void internalRun() throws Exception { * @param context */ protected void validate(Context context) { + // check zip type: uploaded file or remote url + if (commandLine.hasOption('z')) { + zipfilename = commandLine.getOptionValue('z'); + } else if (commandLine.hasOption('u')) { + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } + if (StringUtils.isBlank(zipfilename)) { + throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file"); + } + if (command == null) { handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); throw new UnsupportedOperationException("Must run with either add, replace, or remove"); @@ -295,7 +321,6 @@ protected void process(Context context, ItemImportService itemImportService, handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE); } finally { mapFile.delete(); - workFile.delete(); } } @@ -306,17 +331,55 @@ protected 
void process(Context context, ItemImportService itemImportService, * @throws Exception */ protected void readZip(Context context, ItemImportService itemImportService) throws Exception { - Optional optionalFileStream = handler.getFileStream(context, zipfilename); - if (optionalFileStream.isPresent()) { + Optional optionalFileStream = Optional.empty(); + Optional validationFileStream = Optional.empty(); + if (!remoteUrl) { + // manage zip via upload + optionalFileStream = handler.getFileStream(context, zipfilename); + validationFileStream = handler.getFileStream(context, zipfilename); + } else { + // manage zip via remote url + optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + } + + if (validationFileStream.isPresent()) { + // validate zip file + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); - sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } else { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } + + /** + * Confirm that the zip file has the correct MIME type + * @param inputStream + */ + protected void validateZip(InputStream inputStream) { + Tika tika = new Tika(); + try { + String mimeType = tika.detect(inputStream); + if (mimeType.equals("application/zip")) { + zipvalid = true; + } else { + handler.logError("A valid zip 
file must be supplied. The provided file has mimetype: " + mimeType); + throw new UnsupportedOperationException("A valid zip file must be supplied"); + } + } catch (IOException e) { + throw new IllegalArgumentException( + "There was an error while reading the zip file: " + zipfilename); + } } /** @@ -356,7 +419,6 @@ protected void setMapFile() throws IOException { */ protected void setZip() { zip = true; - zipfilename = commandLine.getOptionValue('z'); } /** diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 35de7b443a97..98d2469b7155 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -8,10 +8,15 @@ package org.dspace.app.itemimport; import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.net.URL; import java.sql.SQLException; import java.util.List; +import java.util.Optional; import java.util.UUID; +import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.content.Collection; @@ -62,7 +67,7 @@ protected void validate(Context context) { handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); throw new UnsupportedOperationException("Must run with either add, replace, or remove"); } else if ("add".equals(command) || "replace".equals(command)) { - if (sourcedir == null) { + if (!remoteUrl && sourcedir == null) { handler.logError("A source directory containing items must be set (run with -h flag for details)"); throw new UnsupportedOperationException("A source directory containing items must be set"); } @@ -96,10 +101,43 @@ protected void process(Context context, ItemImportService itemImportService, protected void readZip(Context context, ItemImportService 
itemImportService) throws Exception { // If this is a zip archive, unzip it first if (zip) { - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR - + File.separator + context.getCurrentUser().getID()); - sourcedir = itemImportService.unzip( - new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + if (!remoteUrl) { + // confirm zip file exists + File myZipFile = new File(sourcedir + File.separator + zipfilename); + if ((!myZipFile.exists()) || (!myZipFile.isFile())) { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + + // validate zip file + InputStream validationFileStream = new FileInputStream(myZipFile); + validateZip(validationFileStream); + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip( + new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + } else { + // manage zip via remote url + Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (optionalFileStream.isPresent()) { + // validate zip file via url + Optional validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + + workFile = new File(itemImportService.getTempWorkDir() + File.separator + + zipfilename + "-" + context.getCurrentUser().getID()); + FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } else { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + } } } @@ 
-120,6 +158,12 @@ protected void setZip() { zip = true; zipfilename = commandLine.getOptionValue('z'); } + + if (commandLine.hasOption('u')) { // remote url + zip = true; + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } } @Override diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java index d265cbf4a1d6..89abd7155b39 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java @@ -37,6 +37,9 @@ public Options getOptions() { options.addOption(Option.builder("z").longOpt("zip") .desc("name of zip file") .hasArg().required(false).build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .hasArg().build()); options.addOption(Option.builder("c").longOpt("collection") .desc("destination collection(s) Handle or database ID") .hasArg().required(false).build()); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index a3149040c49b..3f2675ea58f1 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -8,14 +8,10 @@ package org.dspace.app.itemimport; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemImport} script @@ -24,9 +20,6 
@@ */ public class ItemImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +32,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); @@ -64,7 +48,10 @@ public Options getOptions() { options.addOption(Option.builder("z").longOpt("zip") .desc("name of zip file") .type(InputStream.class) - .hasArg().required().build()); + .hasArg().build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .hasArg().build()); options.addOption(Option.builder("c").longOpt("collection") .desc("destination collection(s) Handle or database ID") .hasArg().required(false).build()); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java index 076cc8ebe20e..255f4bdcbb15 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java @@ -774,6 +774,10 @@ protected Item addItem(Context c, List mycollections, String path, // put item in system if (!isTest) { try { + // Add provenance info + String provenance = installItemService.getSubmittedByProvenanceMessage(c, wi.getItem()); + itemService.addMetadata(c, wi.getItem(), MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provenance); installItemService.installItem(c, wi, myhandle); } catch (Exception e) { workspaceItemService.deleteAll(c, wi); @@ -952,9 
+956,10 @@ protected void addDCValue(Context c, Item i, String schema, Node n) String qualifier = getAttributeValue(n, "qualifier"); //NodeValue(); // //getElementData(n, // "qualifier"); - String language = getAttributeValue(n, "language"); - if (language != null) { - language = language.trim(); + + String language = null; + if (StringUtils.isNotBlank(getAttributeValue(n, "language"))) { + language = getAttributeValue(n, "language").trim(); } if (!isQuiet) { diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index e7d4745c952f..723ba91fb946 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -22,6 +22,7 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.DSpaceRunnable.StepResult; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -165,8 +166,13 @@ public static int handleScript(String[] args, Document commandConfigs, private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, DSpaceRunnable script, EPerson currentUser) { try { - script.initialize(args, dSpaceRunnableHandler, currentUser); - script.run(); + StepResult result = script.initialize(args, dSpaceRunnableHandler, currentUser); + // check the StepResult, only run the script if the result is Continue; + // otherwise - for example the script is started with the help as argument, nothing is to do + if (StepResult.Continue.equals(result)) { + // runs the script, the normal initialization is successful + script.run(); + } return 0; } catch (ParseException e) { script.printHelp(); diff --git 
a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index 467303c3cafd..afe1bb3d75df 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,9 @@ public InputStream getDestinationStream(Item currentItem, InputStream source, bo File f2 = null; File f3 = null; try { - f2 = getImageFile(f, 0, verbose); + // Step 1: get an image from our PDF file, with PDF-specific processing options + f2 = getImageFile(f, verbose); + // Step 2: use the image above to create the final resized and rotated thumbnail f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index d16243e3e3bc..408982d157e5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -116,9 +116,17 @@ public File getThumbnailFile(File f, boolean verbose) return f2; } - public File getImageFile(File f, int page, boolean verbose) + /** + * Return an image from a bitstream with specific processing options for + * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to + * generate an intermediate image file for use with getThumbnailFile. + */ + public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { - File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + // Writing an intermediate file to disk is inefficient, but since we're + // doing it anyway, we should use a lossless format. 
IM's internal MIFF + // is lossless like PNG and TIFF, but much faster. + File f2 = new File(f.getParentFile(), f.getName() + ".miff"); f2.deleteOnExit(); ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); @@ -155,7 +163,7 @@ public File getImageFile(File f, int page, boolean verbose) op.define("pdf:use-cropbox=true"); } - String s = "[" + page + "]"; + String s = "[0]"; op.addImage(f.getAbsolutePath() + s); if (configurationService.getBooleanProperty(PRE + ".flatten", true)) { op.flatten(); @@ -208,20 +216,20 @@ public boolean preProcessBitstream(Context c, Item item, Bitstream source, boole if (description != null) { if (replaceRegex.matcher(description).matches()) { if (verbose) { - System.out.format("%s %s matches pattern and is replacable.%n", - description, nsrc); + System.out.format("%s %s matches pattern and is replaceable.%n", + description, n); } continue; } if (description.equals(getDescription())) { if (verbose) { System.out.format("%s %s is replaceable.%n", - getDescription(), nsrc); + getDescription(), n); } continue; } } - System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", + System.out.format("Custom thumbnail exists for %s for item %s. 
Thumbnail will not be generated.%n", nsrc, item.getHandle()); return false; } diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java new file mode 100644 index 000000000000..4221a514d7d5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; + +import org.dspace.content.Item; +import org.im4java.core.ConvertCmd; +import org.im4java.core.IM4JavaException; +import org.im4java.core.IMOperation; + + +/** + * Filter video bitstreams, scaling the image to be within the bounds of + * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be + * no bigger than. Creates only JPEGs. 
+ */ +public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter { + private static final int DEFAULT_WIDTH = 180; + private static final int DEFAULT_HEIGHT = 120; + private static final int FRAME_NUMBER = 100; + + /** + * @param currentItem item + * @param source source input stream + * @param verbose verbose mode + * @return InputStream the resulting input stream + * @throws Exception if error + */ + @Override + public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) + throws Exception { + File f = inputStreamToTempFile(source, "imthumb", ".tmp"); + File f2 = null; + try { + f2 = getThumbnailFile(f, verbose); + byte[] bytes = Files.readAllBytes(f2.toPath()); + return new ByteArrayInputStream(bytes); + } finally { + //noinspection ResultOfMethodCallIgnored + f.delete(); + if (f2 != null) { + //noinspection ResultOfMethodCallIgnored + f2.delete(); + } + } + } + + @Override + public File getThumbnailFile(File f, boolean verbose) + throws IOException, InterruptedException, IM4JavaException { + File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + f2.deleteOnExit(); + ConvertCmd cmd = new ConvertCmd(); + IMOperation op = new IMOperation(); + op.autoOrient(); + op.addImage("VIDEO:" + f.getAbsolutePath() + "[" + FRAME_NUMBER + "]"); + op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH), + configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT)); + op.addImage(f2.getAbsolutePath()); + if (verbose) { + System.out.println("IM Thumbnail Param: " + op); + } + cmd.run(op); + return f2; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 26347c56ee96..7465fa6e1279 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ 
b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -7,25 +7,16 @@ */ package org.dspace.app.mediafilter; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class MediaFilterScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; @@ -36,23 +27,14 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT"); options.addOption("q", "quiet", false, "do not print anything except in the event of errors."); options.addOption("f", "force", false, "force all bitstreams to be processed"); - options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier"); + options.addOption("i", "identifier", true, + "ONLY process bitstreams belonging to the provided handle identifier"); options.addOption("m", "maximum", true, "process no more than maximum items"); options.addOption("h", "help", false, "help"); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java 
b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index 6b7f833e6dde..974dc784bd4f 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -8,13 +8,18 @@ package org.dspace.app.mediafilter; import java.io.InputStream; +import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.BitstreamFormat; @@ -36,6 +41,7 @@ import org.dspace.eperson.service.GroupService; import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.services.ConfigurationService; +import org.dspace.util.ThrowableUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -126,12 +132,18 @@ public void applyFiltersAllItems(Context context) throws Exception { @Override public void applyFiltersCommunity(Context context, Community community) throws Exception { //only apply filters if community not in skip-list + // ensure that the community is attached to the current hibernate session + // as we are committing after each item (handles, sub-communties and + // collections are lazy attributes) + community = context.reloadEntity(community); if (!inSkipList(community.getHandle())) { List subcommunities = community.getSubcommunities(); for (Community subcommunity : subcommunities) { applyFiltersCommunity(context, subcommunity); } - + // ensure that the community is attached to the current hibernate session + // as we are committing 
after each item + community = context.reloadEntity(community); List collections = community.getCollections(); for (Collection collection : collections) { applyFiltersCollection(context, collection); @@ -142,6 +154,9 @@ public void applyFiltersCommunity(Context context, Community community) @Override public void applyFiltersCollection(Context context, Collection collection) throws Exception { + // ensure that the collection is attached to the current hibernate session + // as we are committing after each item (handles are lazy attributes) + collection = context.reloadEntity(collection); //only apply filters if collection not in skip-list if (!inSkipList(collection.getHandle())) { Iterator itemIterator = itemService.findAllByCollection(context, collection); @@ -165,6 +180,7 @@ public void applyFiltersItem(Context c, Item item) throws Exception { } // clear item objects from context cache and internal cache c.uncacheEntity(currentItem); + c.commit(); currentItem = null; } } @@ -221,23 +237,9 @@ public boolean filterBitstream(Context context, Item myItem, filtered = true; } } catch (Exception e) { - String handle = myItem.getHandle(); - List bundles = myBitstream.getBundles(); - long size = myBitstream.getSizeBytes(); - String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")"; - int assetstore = myBitstream.getStoreNumber(); - // Printout helpful information to find the errored bitstream. 
- StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n"); - sb.append("\tItem Handle: ").append(handle); - for (Bundle bundle : bundles) { - sb.append("\tBundle Name: ").append(bundle.getName()); - } - sb.append("\tFile Size: ").append(size); - sb.append("\tChecksum: ").append(checksum); - sb.append("\tAsset Store: ").append(assetstore); - logError(sb.toString()); - logError(e.getMessage(), e); + logError(formatBitstreamDetails(myItem.getHandle(), myBitstream)); + logError(ThrowableUtils.formatCauseChain(e)); } } else if (filterClass instanceof SelfRegisterInputFormats) { // Filter implements self registration, so check to see if it should be applied @@ -315,25 +317,25 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo // check if destination bitstream exists Bundle existingBundle = null; - Bitstream existingBitstream = null; + List existingBitstreams = new ArrayList<>(); List bundles = itemService.getBundles(item, formatFilter.getBundleName()); - if (bundles.size() > 0) { - // only finds the last match (FIXME?) 
+ if (!bundles.isEmpty()) { + // only finds the last matching bundle and all matching bitstreams in the proper bundle(s) for (Bundle bundle : bundles) { List bitstreams = bundle.getBitstreams(); for (Bitstream bitstream : bitstreams) { if (bitstream.getName().trim().equals(newName.trim())) { existingBundle = bundle; - existingBitstream = bitstream; + existingBitstreams.add(bitstream); } } } } // if exists and overwrite = false, exit - if (!overWrite && (existingBitstream != null)) { + if (!overWrite && (!existingBitstreams.isEmpty())) { if (!isQuiet) { logInfo("SKIPPED: bitstream " + source.getID() + " (item: " + item.getHandle() + ") because '" + newName + "' already exists"); @@ -366,7 +368,7 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo } Bundle targetBundle; // bundle we're modifying - if (bundles.size() < 1) { + if (bundles.isEmpty()) { // create new bundle if needed targetBundle = bundleService.create(context, item, formatFilter.getBundleName()); } else { @@ -388,29 +390,18 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo bitstreamService.update(context, b); //Set permissions on the derivative bitstream - //- First remove any existing policies - authorizeService.removeAllPolicies(context, b); - - //- Determine if this is a public-derivative format - if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { - //- Set derivative bitstream to be publicly accessible - Group anonymous = groupService.findByName(context, Group.ANONYMOUS); - authorizeService.addPolicy(context, b, Constants.READ, anonymous); - } else { - //- replace the policies using the same in the source bitstream - authorizeService.replaceAllPolicies(context, source, b); - } + updatePoliciesOfDerivativeBitstream(context, b, formatFilter, source); //do post-processing of the generated bitstream formatFilter.postProcessBitstream(context, item, b); } catch (OutOfMemoryError oome) { logError("!!! 
OutOfMemoryError !!!"); + logError(formatBitstreamDetails(item.getHandle(), source)); } - // fixme - set date? // we are overwriting, so remove old bitstream - if (existingBitstream != null) { + for (Bitstream existingBitstream : existingBitstreams) { bundleService.removeBitstream(context, existingBundle, existingBitstream); } @@ -422,6 +413,71 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo return true; } + @Override + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException { + + if (filterClasses == null) { + return; + } + + for (FormatFilter formatFilter : filterClasses) { + for (Bitstream bitstream : findDerivativeBitstreams(item, source, formatFilter)) { + updatePoliciesOfDerivativeBitstream(context, bitstream, formatFilter, source); + } + } + } + + /** + * find derivative bitstreams related to source bitstream + * + * @param item item containing bitstreams + * @param source source bitstream + * @param formatFilter formatFilter + * @return list of derivative bitstreams from source bitstream + * @throws SQLException If something goes wrong in the database + */ + private List findDerivativeBitstreams(Item item, Bitstream source, FormatFilter formatFilter) + throws SQLException { + + String bitstreamName = formatFilter.getFilteredName(source.getName()); + List bundles = itemService.getBundles(item, formatFilter.getBundleName()); + + return bundles.stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .filter(bitstream -> + StringUtils.equals(bitstream.getName().trim(), bitstreamName.trim())) + .collect(Collectors.toList()); + } + + /** + * update resource polices of derivative bitstreams. + * by remove all resource policies and + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. 
+ * + * @param context the context + * @param bitstream derivative bitstream + * @param formatFilter formatFilter + * @param source the source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + private void updatePoliciesOfDerivativeBitstream(Context context, Bitstream bitstream, FormatFilter formatFilter, + Bitstream source) throws SQLException, AuthorizeException { + + authorizeService.removeAllPolicies(context, bitstream); + + if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + authorizeService.addPolicy(context, bitstream, Constants.READ, anonymous); + } else { + authorizeService.replaceAllPolicies(context, source, bitstream); + } + } + @Override public Item getCurrentItem() { return currentItem; @@ -439,6 +495,37 @@ public boolean inSkipList(String identifier) { } } + /** + * Describe a Bitstream in detail. Format a single line of text with + * information such as Bitstore index, backing file ID, size, checksum, + * enclosing Item and Bundles. + * + * @param itemHandle Handle of the Item by which we found the Bitstream. + * @param bitstream the Bitstream to be described. + * @return Bitstream details. 
+ */ + private String formatBitstreamDetails(String itemHandle, + Bitstream bitstream) { + List bundles; + try { + bundles = bitstream.getBundles(); + } catch (SQLException ex) { + logError("Unexpected error fetching Bundles", ex); + bundles = Collections.EMPTY_LIST; + } + StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n"); + sb.append("\tItem Handle: ").append(itemHandle); + for (Bundle bundle : bundles) { + sb.append("\tBundle Name: ").append(bundle.getName()); + } + sb.append("\tFile Size: ").append(bitstream.getSizeBytes()); + sb.append("\tChecksum: ").append(bitstream.getChecksum()) + .append(" (").append(bitstream.getChecksumAlgorithm()).append(')'); + sb.append("\tAsset Store: ").append(bitstream.getStoreNumber()); + sb.append("\tInternal ID: ").append(bitstream.getInternalId()); + return sb.toString(); + } + private void logInfo(String message) { if (handler != null) { handler.logInfo(message); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java index 50a6bb3a2027..bc92ff521098 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java @@ -7,10 +7,12 @@ */ package org.dspace.app.mediafilter.service; +import java.sql.SQLException; import java.util.List; import java.util.Map; import org.dspace.app.mediafilter.FormatFilter; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -91,6 +93,22 @@ public void applyFiltersCollection(Context context, Collection collection) public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter) throws Exception; + /** + * update resource polices of derivative bitstreams + * related to source 
bitstream. + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. + * + * @param context context + * @param item item containing bitstreams + * @param source source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException; + /** * Return the item that is currently being processed/filtered * by the MediaFilterManager. diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/DspaceExportMetadataSchemaException.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/DspaceExportMetadataSchemaException.java new file mode 100644 index 000000000000..1f2cbd824a80 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/DspaceExportMetadataSchemaException.java @@ -0,0 +1,23 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class DspaceExportMetadataSchemaException extends Exception { + + public DspaceExportMetadataSchemaException(Exception e) { + super(e); + } + + public DspaceExportMetadataSchemaException(String message, Exception e) { + super(message, e); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportCliScript.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportCliScript.java new file mode 100644 index 000000000000..83b8e94330ba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportCliScript.java @@ 
-0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export; + +import java.io.File; + +import org.apache.commons.cli.ParseException; +import org.dspace.core.Context; + +/** + * This script can be use to export a given {@code MetadataSchema} into its + * registry file, that respects the standard DTD / XSD DSpace xml registry. + *

+ * This script is supposed to work with the CLI (command-line-interface), + * it accepts only two parameters {@code -i -f } + * respectively representing: + *

    + *
  • {@code schema-id}: id of the schema to export
  • + *
  • {@code file-path}:full file path of the file that will contain the export
  • + *
      + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class MetadataSchemaExportCliScript extends MetadataSchemaExportScript { + + protected String filename; + + @Override + public void setup() throws ParseException { + super.setup(); + filename = commandLine.getOptionValue('f'); + } + + @Override + protected File getExportedFile(Context context) throws DspaceExportMetadataSchemaException { + try { + File file = new File(filename); + return metadataSchemaExportService.exportMetadataSchemaToFile(context, metadataSchema, file); + } catch (DspaceExportMetadataSchemaException e) { + handler.logError("Problem occured while exporting the schema to file: " + filename, e); + throw e; + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportCliScriptConfiguration.java new file mode 100644 index 000000000000..5adfa2a725fc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportCliScriptConfiguration.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class MetadataSchemaExportCliScriptConfiguration + extends MetadataSchemaExportScriptConfiguration { + + @Override + public Options getOptions() { + Options options = super.getOptions(); + + options.addOption( + Option.builder("f").longOpt("file") + .desc("The temporary file-name to use") + .hasArg() + .build() + ); + + return options; + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportScript.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportScript.java new file mode 100644 index 000000000000..3b07722a4b13 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportScript.java @@ -0,0 +1,126 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export; + +import java.io.File; +import java.io.FileInputStream; +import java.sql.SQLException; +import java.text.MessageFormat; + +import org.apache.commons.cli.ParseException; +import org.dspace.app.metadata.export.service.MetadataExportServiceFactory; +import org.dspace.app.metadata.export.service.MetadataSchemaExportService; +import org.dspace.content.MetadataSchema; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.MetadataSchemaService; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * This script can be use to export a given {@code MetadataSchema} into its + * registry file, that respects the standard DTD / XSD DSpace xml registry. + *

      + * This script is supposed to work with the webapp, it accepts only one + * parameter {@code -i } representing the id of the schema that + * will be exported. + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class MetadataSchemaExportScript + extends DSpaceRunnable> { + + protected static String REGISTRY_FILENAME_TEMPLATE = "{0}-types.xml"; + + protected MetadataSchemaService metadataSchemaService = + ContentServiceFactory.getInstance().getMetadataSchemaService(); + + protected MetadataSchemaExportService metadataSchemaExportService = + MetadataExportServiceFactory.getInstance().getMetadataSchemaExportService(); + + protected boolean help; + protected int id; + + protected MetadataSchema metadataSchema; + + @Override + public MetadataSchemaExportScriptConfiguration getScriptConfiguration() { + return DSpaceServicesFactory + .getInstance().getServiceManager() + .getServiceByName("export-schema", MetadataSchemaExportScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + help = commandLine.hasOption('h'); + try { + id = Integer.parseInt(commandLine.getOptionValue('i')); + } catch (Exception e) { + handler.logError("Cannot parse the id argument ( " + id + " )! You should provide an integer!"); + throw new ParseException("Cannot parse the id argument ( " + id + " )! 
You should provide an integer!"); + } + } + + @Override + public void internalRun() throws Exception { + if (help) { + printHelp(); + return; + } + + Context context = new Context(); + try { + validate(context); + exportMetadataSchema(context); + } catch (Exception e) { + context.abort(); + throw e; + } + } + + private void validate(Context context) throws SQLException, ParseException { + metadataSchema = this.metadataSchemaService.find(context, id); + if (metadataSchema == null) { + handler.logError("Cannot find the metadata-schema with id: " + id); + throw new ParseException("Cannot find the metadata-schema with id: " + id); + } + } + + private void exportMetadataSchema(Context context) throws Exception { + handler.logInfo( + "Exporting the metadata-schema file for the schema " + metadataSchema.getName() + ); + try { + File tempFile = getExportedFile(context); + + handler.logInfo( + "Exported to file: " + tempFile.getAbsolutePath() + ); + + try (FileInputStream fis = new FileInputStream(tempFile)) { + handler.logInfo("Summarizing export ..."); + context.turnOffAuthorisationSystem(); + handler.writeFilestream( + context, getFilename(metadataSchema), fis, "application/xml", false + ); + context.restoreAuthSystemState(); + } + } catch (Exception e) { + handler.logError("Problem occured while exporting the schema!", e); + throw e; + } + } + + protected String getFilename(MetadataSchema ms) { + return MessageFormat.format(REGISTRY_FILENAME_TEMPLATE, ms.getName()); + } + + protected File getExportedFile(Context context) throws DspaceExportMetadataSchemaException { + return this.metadataSchemaExportService.exportMetadataSchemaToFile(context, metadataSchema); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportScriptConfiguration.java new file mode 100644 index 000000000000..665dbe15567c --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/app/metadata/export/MetadataSchemaExportScriptConfiguration.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export; + +import java.sql.SQLException; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Configuration of the Script {@code MetadataSchemaExportScript} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class MetadataSchemaExportScriptConfiguration + extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption( + Option.builder("i").longOpt("id") + .desc("Metadata schema id") + .hasArg() + .required() + .build() + ); + + options.addOption( + Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false) + .required(false) + .build() + ); + + return options; + } +} diff --git 
a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/AbstractJaxbBuilder.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/AbstractJaxbBuilder.java new file mode 100644 index 000000000000..925020a52631 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/AbstractJaxbBuilder.java @@ -0,0 +1,57 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import java.lang.reflect.InvocationTargetException; +import java.util.function.Function; +import javax.xml.bind.JAXBElement; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public abstract class AbstractJaxbBuilder { + + T object; + Class clazz; + + protected final ObjectFactory objectFactory = new ObjectFactory(); + + protected AbstractJaxbBuilder(Class clazz) { + this.clazz = clazz; + } + + protected T getObejct() { + if (object == null) { + try { + object = clazz.getDeclaredConstructor().newInstance(); + } catch (InstantiationException e) { + throw new RuntimeException(e); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } catch (InvocationTargetException e) { + throw new RuntimeException(e); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + } + return object; + } + + public T build() { + return object; + } + + protected void addChildElement(C value, Function> mapper) { + if (value == null) { + return; + } + addChildElement(mapper.apply(value)); + } + + protected abstract void addChildElement(JAXBElement v); +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcSchema.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcSchema.java new file mode 100644 index 000000000000..e0ad541bdb84 --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcSchema.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import java.util.ArrayList; +import java.util.List; +import javax.xml.bind.JAXBElement; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElementRef; +import javax.xml.bind.annotation.XmlElementRefs; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlType; + + +/** + *

      Classe Java per anonymous complex type. + * + *

      Il seguente frammento di schema specifica il contenuto previsto contenuto in questa classe. + * + *

      + * <complexType>
      + *   <complexContent>
      + *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
      + *       <choice maxOccurs="unbounded" minOccurs="0">
      + *         <element ref="{}name"/>
      + *         <element ref="{}namespace"/>
      + *       </choice>
      + *     </restriction>
      + *   </complexContent>
      + * </complexType>
      + * 
      + */ +@XmlAccessorType(XmlAccessType.FIELD) +@XmlType(name = "", propOrder = { + "nameOrNamespace" +}) +@XmlRootElement(name = "dc-schema") +public class DcSchema { + + @XmlElementRefs({ + @XmlElementRef(name = "name", type = JAXBElement.class, required = false), + @XmlElementRef(name = "namespace", type = JAXBElement.class, required = false) + }) + protected List> nameOrNamespace; + + /** + * Gets the value of the nameOrNamespace property. + * + *

      + * This accessor method returns a reference to the live list, + * not a snapshot. Therefore any modification you make to the + * returned list will be present inside the JAXB object. + * This is why there is not a set method for the nameOrNamespace property. + * + *

      + * For example, to add a new item, do as follows: + *

      +     *    getNameOrNamespace().add(newItem);
      +     * 
      + * + * + *

      + * Objects of the following type(s) are allowed in the list + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + */ + public List> getNameOrNamespace() { + if (nameOrNamespace == null) { + nameOrNamespace = new ArrayList>(); + } + return this.nameOrNamespace; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcSchemaBuilder.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcSchemaBuilder.java new file mode 100644 index 000000000000..fe7144bda854 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcSchemaBuilder.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import javax.xml.bind.JAXBElement; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class DcSchemaBuilder extends AbstractJaxbBuilder { + + protected DcSchemaBuilder() { + super(DcSchema.class); + } + + public static DcSchemaBuilder createBuilder() { + return new DcSchemaBuilder(); + } + + public DcSchemaBuilder withName(String name) { + this.addChildElement(name, objectFactory::createName); + return this; + } + + public DcSchemaBuilder withNamespace(String namespace) { + this.addChildElement(namespace, objectFactory::createNamespace); + return this; + } + + @Override + protected void addChildElement(JAXBElement v) { + getObejct().getNameOrNamespace().add(v); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcType.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcType.java new file mode 100644 index 000000000000..bff2fc77978a --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcType.java @@ -0,0 +1,86 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import java.util.ArrayList; +import java.util.List; +import javax.xml.bind.JAXBElement; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElementRef; +import javax.xml.bind.annotation.XmlElementRefs; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlType; + + +/** + *

      Java class for anonymous complex type. + * + *

      The following schema fragment specifies the expected content contained in this class. + * + *

      + * <complexType>
      + *   <complexContent>
      + *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
      + *       <choice maxOccurs="unbounded" minOccurs="0">
      + *         <element ref="{}schema"/>
      + *         <element ref="{}element"/>
      + *         <element ref="{}qualifier"/>
      + *         <element ref="{}scope_note"/>
      + *       </choice>
      + *     </restriction>
      + *   </complexContent>
      + * </complexType>
      + * 
      + */ +@XmlAccessorType(XmlAccessType.FIELD) +@XmlType(name = "", propOrder = { + "schemaOrElementOrQualifier" +}) +@XmlRootElement(name = "dc-type") +public class DcType { + + @XmlElementRefs({ + @XmlElementRef(name = "schema", type = JAXBElement.class, required = false), + @XmlElementRef(name = "element", type = JAXBElement.class, required = false), + @XmlElementRef(name = "qualifier", type = JAXBElement.class, required = false), + @XmlElementRef(name = "scope_note", type = JAXBElement.class, required = false) + }) + protected List> schemaOrElementOrQualifier; + + /** + * Gets the value of the schemaOrElementOrQualifier property. + * + *

      + * This accessor method returns a reference to the live list, + * not a snapshot. Therefore any modification you make to the + * returned list will be present inside the JAXB object. + * This is why there is not a set method for the schemaOrElementOrQualifier property. + * + *

      + * For example, to add a new item, do as follows: + *

      +     *    getSchemaOrElementOrQualifier().add(newItem);
      +     * 
      + * + * + *

      + * Objects of the following type(s) are allowed in the list + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + */ + public List> getSchemaOrElementOrQualifier() { + if (schemaOrElementOrQualifier == null) { + schemaOrElementOrQualifier = new ArrayList>(); + } + return this.schemaOrElementOrQualifier; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcTypeBuilder.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcTypeBuilder.java new file mode 100644 index 000000000000..47fd64763ead --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DcTypeBuilder.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import javax.xml.bind.JAXBElement; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class DcTypeBuilder extends AbstractJaxbBuilder { + + protected DcTypeBuilder() { + super(DcType.class); + } + + public static DcTypeBuilder createBuilder() { + return new DcTypeBuilder(); + } + + public DcTypeBuilder withSchema(String schema) { + addChildElement(schema, objectFactory::createSchema); + return this; + } + + public DcTypeBuilder withElement(String element) { + addChildElement(element, objectFactory::createElement); + return this; + } + + public DcTypeBuilder withQualifier(String qualifier) { + addChildElement(qualifier, objectFactory::createQualifier); + return this; + } + + public DcTypeBuilder withScopeNote(String scopeNote) { + addChildElement(scopeNote, objectFactory::createScopeNote); + return this; + } + + 
@Override + protected void addChildElement(JAXBElement v) { + getObejct().getSchemaOrElementOrQualifier().add(v); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceDcTypes.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceDcTypes.java new file mode 100644 index 000000000000..4cba081a8a30 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceDcTypes.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import java.util.ArrayList; +import java.util.List; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlElements; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlType; + + +/** + *

      Java class for anonymous complex type. + * + *

      The following schema fragment specifies the expected content contained in this class. + * + *

      + * <complexType>
      + *   <complexContent>
      + *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
      + *       <choice maxOccurs="unbounded" minOccurs="0">
      + *         <element ref="{}dspace-header"/>
      + *         <element ref="{}dc-schema"/>
      + *         <element ref="{}dc-type"/>
      + *       </choice>
      + *     </restriction>
      + *   </complexContent>
      + * </complexType>
      + * 
      + */ +@XmlAccessorType(XmlAccessType.FIELD) +@XmlType(name = "", propOrder = { + "dspaceHeaderOrDcSchemaOrDcType" +}) +@XmlRootElement(name = "dspace-dc-types") +public class DspaceDcTypes { + + @XmlElements({ + @XmlElement(name = "dspace-header", type = DspaceHeader.class), + @XmlElement(name = "dc-schema", type = DcSchema.class), + @XmlElement(name = "dc-type", type = DcType.class) + }) + protected List dspaceHeaderOrDcSchemaOrDcType; + + /** + * Gets the value of the dspaceHeaderOrDcSchemaOrDcType property. + * + *

      + * This accessor method returns a reference to the live list, + * not a snapshot. Therefore any modification you make to the + * returned list will be present inside the JAXB object. + * This is why there is not a set method for the dspaceHeaderOrDcSchemaOrDcType property. + * + *

      + * For example, to add a new item, do as follows: + *

      +     *    getDspaceHeaderOrDcSchemaOrDcType().add(newItem);
      +     * 
      + * + * + *

      + * Objects of the following type(s) are allowed in the list + * {@link DspaceHeader } + * {@link DcSchema } + * {@link DcType } + */ + public List getDspaceHeaderOrDcSchemaOrDcType() { + if (dspaceHeaderOrDcSchemaOrDcType == null) { + dspaceHeaderOrDcSchemaOrDcType = new ArrayList(); + } + return this.dspaceHeaderOrDcSchemaOrDcType; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceDcTypesBuilder.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceDcTypesBuilder.java new file mode 100644 index 000000000000..1e4cdb83393c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceDcTypesBuilder.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import java.util.Collection; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class DspaceDcTypesBuilder { + + private DspaceDcTypes dcTypes; + + private final ObjectFactory objectFactory = new ObjectFactory(); + + private DspaceDcTypes getDcTypes() { + if (dcTypes == null) { + dcTypes = new DspaceDcTypes(); + } + return dcTypes; + } + + private DspaceDcTypesBuilder() { + } + + public static DspaceDcTypesBuilder createBuilder() { + return new DspaceDcTypesBuilder(); + } + + public DspaceDcTypesBuilder witheader(DspaceHeader header) { + this.getDcTypes().getDspaceHeaderOrDcSchemaOrDcType().add(header); + return this; + } + + public DspaceDcTypesBuilder withSchema(DcSchema schema) { + this.getDcTypes().getDspaceHeaderOrDcSchemaOrDcType().add(schema); + return this; + } + + public DspaceDcTypesBuilder withDcType(DcType dcType) { + this.getDcTypes().getDspaceHeaderOrDcSchemaOrDcType().add(dcType); + return this; + } + + public 
DspaceDcTypesBuilder withDcTypes(Collection dcTypes) { + this.getDcTypes().getDspaceHeaderOrDcSchemaOrDcType().addAll(dcTypes); + return this; + } + + public DspaceDcTypes build() { + return dcTypes; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceHeader.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceHeader.java new file mode 100644 index 000000000000..151c8b28292d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceHeader.java @@ -0,0 +1,92 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import java.util.ArrayList; +import java.util.List; +import javax.xml.bind.JAXBElement; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElementRef; +import javax.xml.bind.annotation.XmlElementRefs; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlType; + + +/** + *

      Java class for anonymous complex type. + * + *

      The following schema fragment specifies the expected content contained in this class. + * + *

      + * <complexType>
      + *   <complexContent>
      + *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
      + *       <choice maxOccurs="unbounded" minOccurs="0">
      + *         <element ref="{}title"/>
      + *         <element ref="{}contributor.author"/>
      + *         <element ref="{}contributor.editor"/>
      + *         <element ref="{}date.created"/>
      + *         <element ref="{}description"/>
      + *         <element ref="{}description.version"/>
      + *       </choice>
      + *     </restriction>
      + *   </complexContent>
      + * </complexType>
      + * 
      + */ +@XmlAccessorType(XmlAccessType.FIELD) +@XmlType(name = "", propOrder = { + "titleOrContributorAuthorOrContributorEditor" +}) +@XmlRootElement(name = "dspace-header") +public class DspaceHeader { + + @XmlElementRefs({ + @XmlElementRef(name = "title", type = JAXBElement.class, required = false), + @XmlElementRef(name = "contributor.author", type = JAXBElement.class, required = false), + @XmlElementRef(name = "contributor.editor", type = JAXBElement.class, required = false), + @XmlElementRef(name = "date.created", type = JAXBElement.class, required = false), + @XmlElementRef(name = "description", type = JAXBElement.class, required = false), + @XmlElementRef(name = "description.version", type = JAXBElement.class, required = false) + }) + protected List> titleOrContributorAuthorOrContributorEditor; + + /** + * Gets the value of the titleOrContributorAuthorOrContributorEditor property. + * + *

      + * This accessor method returns a reference to the live list, + * not a snapshot. Therefore any modification you make to the + * returned list will be present inside the JAXB object. + * This is why there is not a set method for the titleOrContributorAuthorOrContributorEditor property. + * + *

      + * For example, to add a new item, do as follows: + *

      +     *    getTitleOrContributorAuthorOrContributorEditor().add(newItem);
      +     * 
      + * + * + *

      + * Objects of the following type(s) are allowed in the list + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + * {@link JAXBElement }{@code <}{@link String }{@code >} + */ + public List> getTitleOrContributorAuthorOrContributorEditor() { + if (titleOrContributorAuthorOrContributorEditor == null) { + titleOrContributorAuthorOrContributorEditor = new ArrayList>(); + } + return this.titleOrContributorAuthorOrContributorEditor; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceHeaderBuilder.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceHeaderBuilder.java new file mode 100644 index 000000000000..fb4028a2057b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/DspaceHeaderBuilder.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import javax.xml.bind.JAXBElement; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class DspaceHeaderBuilder extends AbstractJaxbBuilder { + + protected DspaceHeaderBuilder() { + super(DspaceHeader.class); + } + + public static DspaceHeaderBuilder createBuilder() { + return new DspaceHeaderBuilder(); + } + + public DspaceHeaderBuilder withTitle(String title) { + addChildElement(title, objectFactory::createTitle); + return this; + } + + public DspaceHeaderBuilder withContributorAuthor(String contributorAuthor) { + addChildElement(contributorAuthor, objectFactory::createContributorAuthor); + return this; + } + + 
public DspaceHeaderBuilder withContributorEditor(String contributorEditor) { + addChildElement(contributorEditor, objectFactory::createContributorEditor); + return this; + } + + public DspaceHeaderBuilder withDateCreated(String dateCreated) { + addChildElement(dateCreated, objectFactory::createDateCreated); + return this; + } + + public DspaceHeaderBuilder withDescription(String description) { + addChildElement(description, objectFactory::createDescription); + return this; + } + + public DspaceHeaderBuilder withDescriptionVersion(String descriptionVersion) { + addChildElement(descriptionVersion, objectFactory::createDescriptionVersion); + return this; + } + + @Override + protected void addChildElement(JAXBElement v) { + getObejct().getTitleOrContributorAuthorOrContributorEditor().add(v); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/model/ObjectFactory.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/ObjectFactory.java new file mode 100644 index 000000000000..085e8af5f81b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/model/ObjectFactory.java @@ -0,0 +1,212 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.model; + +import javax.xml.bind.JAXBElement; +import javax.xml.bind.annotation.XmlElementDecl; +import javax.xml.bind.annotation.XmlRegistry; +import javax.xml.namespace.QName; + + +/** + * This object contains factory methods for each + * Java content interface and Java element interface + * generated in the org.dspace.app.metadata.export.model package. + *

      An ObjectFactory allows you to programatically + * construct new instances of the Java representation + * for XML content. The Java representation of XML + * content can consist of schema derived interfaces + * and classes representing the binding of schema + * type definitions, element declarations and model + * groups. Factory methods for each of these are + * provided in this class. + */ +@XmlRegistry +public class ObjectFactory { + + private final static QName _Title_QNAME = new QName("", "title"); + private final static QName _ContributorAuthor_QNAME = new QName("", "contributor.author"); + private final static QName _ContributorEditor_QNAME = new QName("", "contributor.editor"); + private final static QName _DateCreated_QNAME = new QName("", "date.created"); + private final static QName _Description_QNAME = new QName("", "description"); + private final static QName _DescriptionVersion_QNAME = new QName("", "description.version"); + private final static QName _Name_QNAME = new QName("", "name"); + private final static QName _Namespace_QNAME = new QName("", "namespace"); + private final static QName _Schema_QNAME = new QName("", "schema"); + private final static QName _Element_QNAME = new QName("", "element"); + private final static QName _Qualifier_QNAME = new QName("", "qualifier"); + private final static QName _ScopeNote_QNAME = new QName("", "scope_note"); + + /** + * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: org + * .dspace.app.metadata.export.model + */ + public ObjectFactory() { + } + + /** + * Create an instance of {@link DspaceDcTypes } + */ + public DspaceDcTypes createDspaceDcTypes() { + return new DspaceDcTypes(); + } + + /** + * Create an instance of {@link DspaceHeader } + */ + public DspaceHeader createDspaceHeader() { + return new DspaceHeader(); + } + + /** + * Create an instance of {@link DcSchema } + */ + public DcSchema createDcSchema() { + return new DcSchema(); + } + + 
/** + * Create an instance of {@link DcType } + */ + public DcType createDcType() { + return new DcType(); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "title") + public JAXBElement createTitle(String value) { + return new JAXBElement(_Title_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "contributor.author") + public JAXBElement createContributorAuthor(String value) { + return new JAXBElement(_ContributorAuthor_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "contributor.editor") + public JAXBElement createContributorEditor(String value) { + return new JAXBElement(_ContributorEditor_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. 
+ * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "date.created") + public JAXBElement createDateCreated(String value) { + return new JAXBElement(_DateCreated_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "description") + public JAXBElement createDescription(String value) { + return new JAXBElement(_Description_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "description.version") + public JAXBElement createDescriptionVersion(String value) { + return new JAXBElement(_DescriptionVersion_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "name") + public JAXBElement createName(String value) { + return new JAXBElement(_Name_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. 
+ * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "namespace") + public JAXBElement createNamespace(String value) { + return new JAXBElement(_Namespace_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "schema") + public JAXBElement createSchema(String value) { + return new JAXBElement(_Schema_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "element") + public JAXBElement createElement(String value) { + return new JAXBElement(_Element_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. + * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "qualifier") + public JAXBElement createQualifier(String value) { + return new JAXBElement(_Qualifier_QNAME, String.class, null, value); + } + + /** + * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} + * + * @param value Java instance representing xml element's value. 
+ * @return the new instance of {@link JAXBElement }{@code <}{@link String }{@code >} + */ + @XmlElementDecl(namespace = "", name = "scope_note") + public JAXBElement createScopeNote(String value) { + return new JAXBElement(_ScopeNote_QNAME, String.class, null, value); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataExportServiceFactory.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataExportServiceFactory.java new file mode 100644 index 000000000000..3553cbcba2fd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataExportServiceFactory.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.service; + +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Factory for the export services related to metadata-schema and metadata-fields. 
+ * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public abstract class MetadataExportServiceFactory { + + public static MetadataExportServiceFactory getInstance() { + return DSpaceServicesFactory + .getInstance().getServiceManager() + .getServiceByName("metadataExportServiceFactory", MetadataExportServiceFactory.class); + } + + public abstract MetadataSchemaExportService getMetadataSchemaExportService(); + public abstract MetadataFieldExportService getMetadataFieldExportService(); + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataExportServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataExportServiceFactoryImpl.java new file mode 100644 index 000000000000..a69d5dfd0fde --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataExportServiceFactoryImpl.java @@ -0,0 +1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.service; + +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class MetadataExportServiceFactoryImpl extends MetadataExportServiceFactory { + + @Autowired + private MetadataSchemaExportService metadataSchemaExportService; + @Autowired + private MetadataFieldExportService metadataFieldExportService; + + @Override + public MetadataSchemaExportService getMetadataSchemaExportService() { + return metadataSchemaExportService; + } + + @Override + public MetadataFieldExportService getMetadataFieldExportService() { + return metadataFieldExportService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataFieldExportService.java 
b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataFieldExportService.java new file mode 100644 index 000000000000..ace312885230 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataFieldExportService.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.service; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.app.metadata.export.model.DcType; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataSchema; +import org.dspace.core.Context; + +/** + * Exports {@code MetadataField} into {@code DcType} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public interface MetadataFieldExportService { + + /** + * Creates a one {@link DCType} for each {@link MetadataField} + * in the given {@link MetadataSchema}, and returns them in a list + * + * @param context + * @param metadataSchema + * @return + * @throws SQLException + */ + List exportMetadataFieldsBy(Context context, MetadataSchema metadataSchema) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataFieldExportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataFieldExportServiceImpl.java new file mode 100644 index 000000000000..1ace35f4e45d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataFieldExportServiceImpl.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.service; + 
+import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.app.metadata.export.model.DcType; +import org.dspace.app.metadata.export.model.DcTypeBuilder; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataSchema; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.MetadataFieldService; +import org.dspace.core.Context; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class MetadataFieldExportServiceImpl implements MetadataFieldExportService { + + private MetadataFieldService metadataFieldService = + ContentServiceFactory.getInstance().getMetadataFieldService(); + + public List exportMetadataFieldsBy(Context context, MetadataSchema metadataSchema) throws SQLException { + return metadataFieldService + .findAllInSchema(context, metadataSchema) + .stream() + .map(this::toDcType) + .collect(Collectors.toList()); + } + + private DcType toDcType(MetadataField metadataField) { + return DcTypeBuilder + .createBuilder() + .withSchema(metadataField.getMetadataSchema().getName()) + .withElement(metadataField.getElement()) + .withQualifier(metadataField.getQualifier()) + .withScopeNote(metadataField.getScopeNote()) + .build(); + } + +} + diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataSchemaExportService.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataSchemaExportService.java new file mode 100644 index 000000000000..cd1f35e2ef9b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataSchemaExportService.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.service; + +import 
java.io.File; +import java.sql.SQLException; + +import org.dspace.app.metadata.export.DspaceExportMetadataSchemaException; +import org.dspace.app.metadata.export.model.DspaceDcTypes; +import org.dspace.content.MetadataSchema; +import org.dspace.core.Context; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public interface MetadataSchemaExportService { + + /** + * Exports the given {@code schemaId} into a {@link DspaceDcTypes} entity + * + * @param context + * @param schemaId + * @return + * @throws SQLException + */ + DspaceDcTypes exportMetadataSchema(Context context, int schemaId) throws SQLException; + + /** + * Exports the given {@code metadataSchema} into a {@link DspaceDcTypes} entity + * + * @param context + * @param metadataSchema + * @return + * @throws SQLException + */ + DspaceDcTypes exportMetadataSchema(Context context, MetadataSchema metadataSchema) throws SQLException; + + /** + * Exports the given {@code metadataSchema} to a temporary {@code File}, + * that will respect the {@code registry} xml format of dspace + * + * @param context + * @param metadataSchema + * @return + * @throws DspaceExportMetadataSchemaException + */ + File exportMetadataSchemaToFile(Context context, MetadataSchema metadataSchema) + throws DspaceExportMetadataSchemaException; + + /** + * Exports the given {@code metadataSchema} to a target {@code File}, + * that will respect the {@code registry} xml format of dspace + * + * @param context + * @param metadataSchema + * @param file + * @return + * @throws DspaceExportMetadataSchemaException + */ + File exportMetadataSchemaToFile(Context context, MetadataSchema metadataSchema, File file) + throws DspaceExportMetadataSchemaException; + +} diff --git a/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataSchemaExportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataSchemaExportServiceImpl.java new file mode 100644 index 
000000000000..eea9a09f7970 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/metadata/export/service/MetadataSchemaExportServiceImpl.java @@ -0,0 +1,107 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export.service; + +import java.io.File; +import java.io.IOException; +import java.sql.SQLException; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; + +import org.dspace.app.metadata.export.DspaceExportMetadataSchemaException; +import org.dspace.app.metadata.export.model.DcSchema; +import org.dspace.app.metadata.export.model.DcSchemaBuilder; +import org.dspace.app.metadata.export.model.DspaceDcTypes; +import org.dspace.app.metadata.export.model.DspaceDcTypesBuilder; +import org.dspace.content.MetadataSchema; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.MetadataSchemaService; +import org.dspace.core.Context; + +/** + * This service can be used to export a target schema into a registry-file + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class MetadataSchemaExportServiceImpl implements MetadataSchemaExportService { + + private MetadataSchemaService metadataSchemaService = + ContentServiceFactory.getInstance().getMetadataSchemaService(); + + @Override + public DspaceDcTypes exportMetadataSchema(Context context, int schemaId) throws SQLException { + return this.exportMetadataSchema(context, metadataSchemaService.find(context, schemaId)); + } + + @Override + public DspaceDcTypes exportMetadataSchema(Context context, MetadataSchema metadataSchema) throws SQLException { + return DspaceDcTypesBuilder + .createBuilder() + .withSchema(this.mapToDcSchema(metadataSchema)) + .withDcTypes( + 
MetadataExportServiceFactory.getInstance() + .getMetadataFieldExportService() + .exportMetadataFieldsBy(context, metadataSchema) + ) + .build(); + } + + @Override + public File exportMetadataSchemaToFile(Context context, MetadataSchema metadataSchema) + throws DspaceExportMetadataSchemaException { + File tempFile; + try { + tempFile = + File.createTempFile( + metadataSchema.getName() + "-" + metadataSchema.getID(), + ".xml" + ); + tempFile.deleteOnExit(); + return this.exportMetadataSchemaToFile(context, metadataSchema, tempFile); + } catch (IOException e) { + throw new DspaceExportMetadataSchemaException( + "Probelm occured during while exporting to temporary file!", + e + ); + } + } + + @Override + public File exportMetadataSchemaToFile(Context context, MetadataSchema metadataSchema, File file) + throws DspaceExportMetadataSchemaException { + try { + DspaceDcTypes dspaceDcTypes = this.exportMetadataSchema(context, metadataSchema); + + JAXBContext jaxb = JAXBContext.newInstance(DspaceDcTypes.class); + Marshaller jaxbMarshaller = jaxb.createMarshaller(); + jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE); + jaxbMarshaller.marshal(dspaceDcTypes, file); + } catch (SQLException e) { + throw new DspaceExportMetadataSchemaException( + "Problem occured while retrieving data from DB!", + e + ); + } catch (JAXBException e) { + throw new DspaceExportMetadataSchemaException( + "Problem occured during the export to XML file!", + e + ); + } + return file; + } + + private DcSchema mapToDcSchema(MetadataSchema metadataSchema) { + return DcSchemaBuilder + .createBuilder() + .withName(metadataSchema.getName()) + .withNamespace(metadataSchema.getNamespace()) + .build(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/nbevent/service/impl/NBEventServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/nbevent/service/impl/NBEventServiceImpl.java index 4df11b054e67..e07fb16c72ca 100644 --- 
a/dspace-api/src/main/java/org/dspace/app/nbevent/service/impl/NBEventServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/nbevent/service/impl/NBEventServiceImpl.java @@ -8,14 +8,17 @@ package org.dspace.app.nbevent.service.impl; import java.io.IOException; +import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; +import java.util.Iterator; import java.util.List; import java.util.UUID; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.json.JsonMapper; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrClient; @@ -33,6 +36,7 @@ import org.dspace.app.nbevent.NBTopic; import org.dspace.app.nbevent.dao.impl.NBEventsDaoImpl; import org.dspace.app.nbevent.service.NBEventService; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; import org.dspace.content.NBEvent; import org.dspace.content.service.ItemService; @@ -316,18 +320,38 @@ private String getResourceUUID(Context context, String originalId) throws Except String id = getHandleFromOriginalId(originalId); if (id != null) { Item item = (Item) handleService.resolveToObject(context, id); + if (item != null) { final String itemUuid = item.getID().toString(); context.uncacheEntity(item); return itemUuid; } else { - return null; + item = fromLegacyIdentifier(context, originalId); + return item == null ? 
null : item.getID().toString(); } } else { throw new RuntimeException("Malformed originalId " + originalId); } } + private Item fromLegacyIdentifier(Context context, String legacyIdentifier) { + if (StringUtils.isBlank(legacyIdentifier)) { + return null; + } + try { + Iterator + iterator = itemService.findUnfilteredByMetadataField( + context, "dspace", "legacy", "oai-identifier", + legacyIdentifier); + if (!iterator.hasNext()) { + return null; + } + return iterator.next(); + } catch (AuthorizeException | SQLException e) { + throw new RuntimeException(e); + } + } + // oai:www.openstarts.units.it:10077/21486 private String getHandleFromOriginalId(String originalId) { Integer startPosition = originalId.lastIndexOf(':'); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 384f33decaf2..6499c45a7830 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -11,55 +11,59 @@ import java.io.IOException; import java.sql.SQLException; import java.util.List; +import javax.annotation.ManagedBean; +import javax.inject.Inject; +import javax.inject.Singleton; import javax.mail.MessagingException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.app.requestitem.factory.RequestItemServiceFactory; import org.dspace.app.requestitem.service.RequestItemService; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; import org.dspace.core.LogHelper; import 
org.dspace.eperson.EPerson; -import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; /** * Send item requests and responses by email. * + *

      The "strategy" by which approvers are chosen is in an implementation of + * {@link RequestItemAuthorExtractor} which is injected by the name + * {@code requestItemAuthorExtractor}. See the DI configuration documents. + * * @author Mark H. Wood */ +@Singleton +@ManagedBean public class RequestItemEmailNotifier { private static final Logger LOG = LogManager.getLogger(); - private static final BitstreamService bitstreamService - = ContentServiceFactory.getInstance().getBitstreamService(); + @Inject + protected BitstreamService bitstreamService; - private static final ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); + @Inject + protected ConfigurationService configurationService; - private static final HandleService handleService - = HandleServiceFactory.getInstance().getHandleService(); + @Inject + protected HandleService handleService; - private static final RequestItemService requestItemService - = RequestItemServiceFactory.getInstance().getRequestItemService(); + @Inject + protected RequestItemService requestItemService; - private static final RequestItemAuthorExtractor requestItemAuthorExtractor - = DSpaceServicesFactory.getInstance() - .getServiceManager() - .getServiceByName("requestItemAuthorExtractor", - RequestItemAuthorExtractor.class); + protected final RequestItemAuthorExtractor requestItemAuthorExtractor; - private RequestItemEmailNotifier() {} + @Inject + public RequestItemEmailNotifier(RequestItemAuthorExtractor requestItemAuthorExtractor) { + this.requestItemAuthorExtractor = requestItemAuthorExtractor; + } /** * Send the request to the approver(s). @@ -70,7 +74,7 @@ private RequestItemEmailNotifier() {} * @throws IOException passed through. * @throws SQLException if the message was not sent. 
*/ - static public void sendRequest(Context context, RequestItem ri, String responseLink) + public void sendRequest(Context context, RequestItem ri, String responseLink) throws IOException, SQLException { // Who is making this request? List authors = requestItemAuthorExtractor @@ -147,12 +151,38 @@ static public void sendRequest(Context context, RequestItem ri, String responseL * @param message email body (may be empty). * @throws IOException if sending failed. */ - static public void sendResponse(Context context, RequestItem ri, String subject, + public void sendResponse(Context context, RequestItem ri, String subject, String message) throws IOException { + // Who granted this request? + List grantors; + try { + grantors = requestItemAuthorExtractor.getRequestItemAuthor(context, ri.getItem()); + } catch (SQLException e) { + LOG.warn("Failed to get grantor's name and address: {}", e.getMessage()); + grantors = List.of(); + } + + String grantorName; + String grantorAddress; + if (grantors.isEmpty()) { + grantorName = configurationService.getProperty("mail.admin.name"); + grantorAddress = configurationService.getProperty("mail.admin"); + } else { + RequestItemAuthor grantor = grantors.get(0); // XXX Cannot know which one + grantorName = grantor.getFullName(); + grantorAddress = grantor.getEmail(); + } + // Build an email back to the requester. - Email email = new Email(); - email.setContent("body", message); + Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), + ri.isAccept_request() ? 
"request_item.granted" : "request_item.rejected")); + email.addArgument(ri.getReqName()); // {0} requestor's name + email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {1} URL of the requested Item + email.addArgument(ri.getItem().getName()); // {2} title of the requested Item + email.addArgument(grantorName); // {3} name of the grantor + email.addArgument(grantorAddress); // {4} email of the grantor + email.addArgument(message); // {5} grantor's optional message email.setSubject(subject); email.addRecipient(ri.getReqEmail()); // Attach bitstreams. @@ -167,17 +197,25 @@ static public void sendResponse(Context context, RequestItem ri, String subject, if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - email.addAttachment(bitstreamService.retrieve(context, - bitstream), bitstream.getName(), + // #8636 Anyone receiving the email can respond to the + // request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment( + bitstreamService.retrieve(context, bitstream), + bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } } } } else { Bitstream bitstream = ri.getBitstream(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } email.send(); } else { @@ -207,7 +245,7 @@ static public void sendResponse(Context context, RequestItem ri, String subject, * @throws IOException if the message body cannot be loaded or the message * cannot be sent. 
*/ - static public void requestOpenAccess(Context context, RequestItem ri) + public void requestOpenAccess(Context context, RequestItem ri) throws IOException { Email message = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "request_item.admin")); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java index 5886f16fde1a..fa7c15b23060 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java @@ -12,10 +12,15 @@ * e-mailed to a responsible party for consideration and action. Find details * in the user documentation under the rubric "Request a Copy". * - *

      This package includes several "strategy" classes which discover responsible - * parties in various ways. See {@link RequestItemSubmitterStrategy} and the - * classes which extend it. A strategy class must be configured and identified - * as {@link RequestItemAuthorExtractor} for injection into code which requires - * Request a Copy services. + *

      Mailing is handled by {@link RequestItemEmailNotifier}. Responsible + * parties are represented by {@link RequestItemAuthor} + * + *

      This package includes several "strategy" classes which discover + * responsible parties in various ways. See + * {@link RequestItemSubmitterStrategy} and the classes which extend it, and + * others which implement {@link RequestItemAuthorExtractor}. A strategy class + * must be configured and identified as {@link requestItemAuthorExtractor} + * (note capitalization) for injection into code which requires Request + * a Copy services. */ package org.dspace.app.requestitem; diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java index 6188272aca47..90962d12aa75 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java @@ -7,20 +7,11 @@ */ package org.dspace.app.sitemap; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; -import java.io.UnsupportedEncodingException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLEncoder; import java.sql.SQLException; import java.util.Date; -import java.util.Iterator; import java.util.List; -import java.util.Optional; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -30,13 +21,8 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.app.customurl.CustomUrlService; -import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import 
org.dspace.content.service.CommunityService; @@ -45,12 +31,12 @@ import org.dspace.core.LogHelper; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchUtils; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.dspace.utils.DSpace; /** * Command-line utility for generating HTML and Sitemaps.org protocol Sitemaps. @@ -71,8 +57,7 @@ public class GenerateSitemaps { private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); private static final SearchService searchService = SearchUtils.getSearchService(); - - private static final CustomUrlService customUrlService = new DSpace().getSingletonService(CustomUrlService.class); + private static final int PAGE_SIZE = 100; /** * Default constructor @@ -92,11 +77,6 @@ public static void main(String[] args) throws Exception { "do not generate sitemaps.org protocol sitemap"); options.addOption("b", "no_htmlmap", false, "do not generate a basic HTML sitemap"); - options.addOption("a", "ping_all", false, - "ping configured search engines"); - options - .addOption("p", "ping", true, - "ping specified search engine URL"); options .addOption("d", "delete", false, "delete sitemaps dir and its contents"); @@ -121,14 +101,13 @@ public static void main(String[] args) throws Exception { } /* - * Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage + * Sanity check -- if no sitemap generation or deletion, print usage */ if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b') && line.hasOption('s') && !line.hasOption('g') - && !line.hasOption('m') && !line.hasOption('y') - && !line.hasOption('p')) { + && !line.hasOption('m') && !line.hasOption('y')) { 
System.err - .println("Nothing to do (no sitemap to generate, no search engines to ping)"); + .println("Nothing to do (no sitemap to generate)"); hf.printHelp(usage, options); System.exit(1); } @@ -142,20 +121,6 @@ public static void main(String[] args) throws Exception { deleteSitemaps(); } - if (line.hasOption('a')) { - pingConfiguredSearchEngines(); - } - - if (line.hasOption('p')) { - try { - pingSearchEngine(line.getOptionValue('p')); - } catch (MalformedURLException me) { - System.err - .println("Bad search engine URL (include all except sitemap URL)"); - System.exit(1); - } - } - System.exit(0); } @@ -194,7 +159,10 @@ public static void deleteSitemaps() throws IOException { */ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException { String uiURLStem = configurationService.getProperty("dspace.ui.url"); - String sitemapStem = uiURLStem + "/sitemap"; + if (!uiURLStem.endsWith("/")) { + uiURLStem = uiURLStem + '/'; + } + String sitemapStem = uiURLStem + "sitemap"; File outputDir = new File(configurationService.getProperty("sitemap.dir")); if (!outputDir.exists() && !outputDir.mkdir()) { @@ -213,186 +181,113 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) } Context c = new Context(Context.Mode.READ_ONLY); + int offset = 0; + long commsCount = 0; + long collsCount = 0; + long itemsCount = 0; - List comms = communityService.findAll(c); - - for (Community comm : comms) { - String url = uiURLStem + "/communities/" + comm.getID(); - - if (makeHTMLMap) { - html.addURL(url, null); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); - } - - c.uncacheEntity(comm); - } - - List colls = collectionService.findAll(c); - - for (Collection coll : colls) { - String url = uiURLStem + "/collections/" + coll.getID(); - - if (makeHTMLMap) { - html.addURL(url, null); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); - } - - c.uncacheEntity(coll); - } - - Iterator 
allItems = itemService.findAll(c); - int itemCount = 0; - - while (allItems.hasNext()) { - Item i = allItems.next(); - - Optional customUrl = customUrlService.getCustomUrl(i); - if (customUrl.isPresent()) { - - String url = uiURLStem + "/entities/" + StringUtils.lowerCase(itemService.getEntityTypeLabel(i)) - + "/" + customUrl.get(); - - if (makeHTMLMap) { - html.addURL(url, null); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); - } - - } - - DiscoverQuery entityQuery = new DiscoverQuery(); - entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*"); - entityQuery.addSearchField("entityType"); - - try { - DiscoverResult discoverResult = searchService.search(c, entityQuery); - - String url; - if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects()) - && CollectionUtils.isNotEmpty(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType")) - && StringUtils.isNotBlank(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0)) - ) { - url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)) - .get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID(); - } else { - url = uiURLStem + "/items/" + i.getID(); + try { + DiscoverQuery discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Community"); + do { + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + commsCount = discoverResult.getTotalSearchResults(); + + for (IndexableObject doc : docs) { + String url = uiURLStem + "communities/" + doc.getID(); + c.uncacheEntity(doc.getIndexedObject()); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + 
sitemapsOrg.addURL(url, null); + } } - Date lastMod = i.getLastModified(); - - if (makeHTMLMap) { - html.addURL(url, lastMod); + offset += PAGE_SIZE; + } while (offset < commsCount); + + offset = 0; + discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Collection"); + do { + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + collsCount = discoverResult.getTotalSearchResults(); + + for (IndexableObject doc : docs) { + String url = uiURLStem + "collections/" + doc.getID(); + c.uncacheEntity(doc.getIndexedObject()); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, lastMod); + offset += PAGE_SIZE; + } while (offset < collsCount); + + offset = 0; + discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Item"); + discoveryQuery.addSearchField("search.entitytype"); + do { + + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + itemsCount = discoverResult.getTotalSearchResults(); + + for (IndexableObject doc : docs) { + String url; + List entityTypeFieldValues = discoverResult.getSearchDocument(doc).get(0) + .getSearchFieldValues("search.entitytype"); + if (CollectionUtils.isNotEmpty(entityTypeFieldValues)) { + url = uiURLStem + "entities/" + StringUtils.lowerCase(entityTypeFieldValues.get(0)) + "/" + + doc.getID(); + } else { + url = uiURLStem + "items/" + doc.getID(); + } + Date lastMod = doc.getLastModified(); + c.uncacheEntity(doc.getIndexedObject()); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } } - } catch 
(SearchServiceException e) { - log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage()); - } - - c.uncacheEntity(i); - - itemCount++; - } - - if (makeHTMLMap) { - int files = html.finish(); - log.info(LogHelper.getHeader(c, "write_sitemap", - "type=html,num_files=" + files + ",communities=" - + comms.size() + ",collections=" + colls.size() - + ",items=" + itemCount)); - } + offset += PAGE_SIZE; + } while (offset < itemsCount); - if (makeSitemapOrg) { - int files = sitemapsOrg.finish(); - log.info(LogHelper.getHeader(c, "write_sitemap", - "type=html,num_files=" + files + ",communities=" - + comms.size() + ",collections=" + colls.size() - + ",items=" + itemCount)); - } - - c.abort(); - } - - /** - * Ping all search engines configured in {@code dspace.cfg}. - * - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingConfiguredSearchEngines() - throws UnsupportedEncodingException { - String[] engineURLs = configurationService - .getArrayProperty("sitemap.engineurls"); - - if (ArrayUtils.isEmpty(engineURLs)) { - log.warn("No search engine URLs configured to ping"); - return; - } - - for (int i = 0; i < engineURLs.length; i++) { - try { - pingSearchEngine(engineURLs[i]); - } catch (MalformedURLException me) { - log.warn("Bad search engine URL in configuration: " - + engineURLs[i]); - } - } - } - - /** - * Ping the given search engine. - * - * @param engineURL Search engine URL minus protocol etc, e.g. 
- * {@code www.google.com} - * @throws MalformedURLException if the passed in URL is malformed - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingSearchEngine(String engineURL) - throws MalformedURLException, UnsupportedEncodingException { - // Set up HTTP proxy - if ((StringUtils.isNotBlank(configurationService.getProperty("http.proxy.host"))) - && (StringUtils.isNotBlank(configurationService.getProperty("http.proxy.port")))) { - System.setProperty("proxySet", "true"); - System.setProperty("proxyHost", configurationService - .getProperty("http.proxy.host")); - System.getProperty("proxyPort", configurationService - .getProperty("http.proxy.port")); - } - - String sitemapURL = configurationService.getProperty("dspace.ui.url") - + "/sitemap"; - - URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8")); - - try { - HttpURLConnection connection = (HttpURLConnection) url - .openConnection(); - - BufferedReader in = new BufferedReader(new InputStreamReader( - connection.getInputStream())); - - String inputLine; - StringBuffer resp = new StringBuffer(); - while ((inputLine = in.readLine()) != null) { - resp.append(inputLine).append("\n"); + if (makeHTMLMap) { + int files = html.finish(); + log.info(LogHelper.getHeader(c, "write_sitemap", + "type=html,num_files=" + files + ",communities=" + + commsCount + ",collections=" + collsCount + + ",items=" + itemsCount)); } - in.close(); - if (connection.getResponseCode() == 200) { - log.info("Pinged " + url.toString() + " successfully"); - } else { - log.warn("Error response pinging " + url.toString() + ":\n" - + resp); + if (makeSitemapOrg) { + int files = sitemapsOrg.finish(); + log.info(LogHelper.getHeader(c, "write_sitemap", + "type=html,num_files=" + files + ",communities=" + + commsCount + ",collections=" + collsCount + + ",items=" + itemsCount)); } - } catch (IOException e) { - log.warn("Error pinging " + url.toString(), e); + } catch 
(SearchServiceException e) { + throw new RuntimeException(e); + } finally { + c.abort(); } } } diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java index b238ccf061f3..067c76cce8b3 100644 --- a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -8,7 +8,6 @@ package org.dspace.app.solrdatabaseresync; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -27,11 +26,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableCl this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - return true; - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index aae042d0cf01..6feb1e247551 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -103,9 +103,9 @@ public boolean isDefinedMultTitles() { * @return true if the current set has all the prev. published fields */ public boolean isDefinedPubBefore() { - return (isFieldPresent("dc.date.issued") && + return isFieldPresent("dc.date.issued") && isFieldPresent("dc.identifier.citation") && - isFieldPresent("dc.publisher")); + isFieldPresent("dc.publisher"); } /** @@ -145,6 +145,9 @@ public Optional getField(String fieldName) { } catch (DCInputsReaderException e) { log.error(e.getMessage(), e); } + } else if (field.isRelationshipField() && + ("relation." 
+ field.getRelationshipType()).equals(fieldName)) { + return Optional.of(field); } else { String fullName = field.getFieldName(); if (fullName.equals(fieldName)) { diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java index ac8031880233..936d5d9c62dd 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java @@ -31,6 +31,7 @@ import org.dspace.content.MetadataSchemaEnum; import org.dspace.core.Utils; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.factory.SubmissionServiceFactory; import org.dspace.submit.model.UploadConfiguration; import org.dspace.submit.model.UploadConfigurationService; import org.dspace.utils.DSpace; @@ -167,7 +168,8 @@ public List getInputsByCollection(Collection collection) throws DCInputsReaderException { SubmissionConfig config; try { - config = new SubmissionConfigReader().getSubmissionConfigByCollection(collection); + config = SubmissionServiceFactory.getInstance().getSubmissionConfigService() + .getSubmissionConfigByCollection(collection); String formName = config.getSubmissionName(); if (formName == null) { throw new DCInputsReaderException("No form designated as default"); @@ -238,7 +240,8 @@ public List getInputsBySubmissionName(String name) throws DCInputsReaderException { SubmissionConfig config; try { - config = new SubmissionConfigReader().getSubmissionConfigByName(name); + config = SubmissionServiceFactory.getInstance().getSubmissionConfigService() + .getSubmissionConfigByName(name); String formName = config.getSubmissionName(); if (formName == null) { throw new DCInputsReaderException("No form designated as default"); @@ -316,6 +319,9 @@ public List getInputsByGroup(String formName) // cache miss - construct new DCInputSet List>> pages = formDefns.get(formName); + if (pages == null) { + return results; + } 
Iterator>> iterator = pages.iterator(); diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 8eb3a0674049..57e6a3fafcea 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -7,8 +7,6 @@ */ package org.dspace.app.util; -import static org.dspace.content.Item.ANY; - import java.io.File; import java.sql.SQLException; import java.util.ArrayList; @@ -25,12 +23,17 @@ import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.InProgressSubmission; +import org.dspace.content.Item; import org.dspace.content.edit.EditItem; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.services.RequestService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.versioning.ItemCorrectionService; +import org.springframework.beans.factory.annotation.Autowired; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; @@ -58,6 +61,10 @@ */ public class SubmissionConfigReader { + + @Autowired + RequestService requestService; + /** * The ID of the default collection. 
Will never be the ID of a named * collection @@ -111,6 +118,23 @@ public class SubmissionConfigReader { */ private SubmissionConfig lastSubmissionConfig = null; + /** + * Collection Service instance, needed to interact with collection's + * stored data + */ + protected static final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); + + /** + * itemCorrectionService instance, needed to retrieve the handle correctly + * item correction actions + * + */ + protected static final ItemCorrectionService itemCorrectionService = + DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(ItemCorrectionService.class) + .get(0); + /** * Load Submission Configuration from the * item-submission.xml configuration file @@ -158,6 +182,9 @@ private void buildInputs(String fileName) throws SubmissionConfigReaderException } catch (FactoryConfigurationError fe) { throw new SubmissionConfigReaderException( "Cannot create Item Submission Configuration parser", fe); + } catch (SearchServiceException se) { + throw new SubmissionConfigReaderException( + "Cannot perform a discovery search for Item Submission Configuration", se); } catch (Exception e) { throw new SubmissionConfigReaderException( "Error creating Item Submission Configuration: " + e); @@ -229,8 +256,10 @@ public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle) public SubmissionConfig getCorrectionSubmissionConfigByCollection(Collection collection) { CollectionService collService = ContentServiceFactory.getInstance().getCollectionService(); - String submitName = collService.getMetadataFirstValue(collection, - "cris", "submission", "definition-correction", ANY); + String submitName = + collService.getMetadataFirstValue( + collection, "cris", "submission", "definition-correction", Item.ANY + ); if (submitName != null) { SubmissionConfig subConfig = getSubmissionConfigByName(submitName); @@ -377,7 +406,7 @@ public SubmissionStepConfig 
getStepConfig(String stepID) * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. */ - private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException { + private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException { if (n == null) { return; } @@ -418,24 +447,32 @@ private void doNodes(Node n) throws SAXException, SubmissionConfigReaderExceptio } } + + + /** * Process the submission-map section of the XML file. Each element looks * like: Extract * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. */ - private void processMap(Node e) throws SAXException { + private void processMap(Node e) throws SAXException, SearchServiceException { + // create a context + Context context = new Context(); + NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); + String entityType = getAttribute(nd, "collection-entity-type"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); - if (id == null) { + if (id == null && entityType == null) { throw new SAXException( - "name-map element is missing collection-handle attribute in 'item-submission.xml'"); + "name-map element is missing collection-handle or collection-entity-type attribute " + + "in 'item-submission.xml'"); } if (value == null) { throw new SAXException( @@ -445,7 +482,17 @@ private void processMap(Node e) throws SAXException { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } - collectionToSubmissionConfig.put(id, value); + if (id != null) { + collectionToSubmissionConfig.put(id, value); + + } else { + // get all collections for this entity-type + List collections = collectionService.findAllCollectionsByEntityType( 
context, + entityType); + for (Collection collection : collections) { + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + } + } } // ignore any child node that isn't a "name-map" } } @@ -731,12 +778,25 @@ public List getCollectionsBySubmissionConfig(Context context, String return results; } - public SubmissionConfig getSubmissionConfigByInProgressSubmission(InProgressSubmission object) { + public SubmissionConfig getSubmissionConfigByInProgressSubmission(InProgressSubmission object, Context context) { if (object instanceof EditItem) { String submissionDefinition = ((EditItem) object).getMode().getSubmissionDefinition(); return getSubmissionConfigByName(submissionDefinition); } - return getSubmissionConfigByCollection(object.getCollection()); + if (isCorrectionItem(object.getItem(), context)) { + return getCorrectionSubmissionConfigByCollection(object.getCollection()); + } else { + return getSubmissionConfigByCollection(object.getCollection()); + } + } + + private boolean isCorrectionItem(Item item, Context context) { + try { + return itemCorrectionService.checkIfIsCorrectionItem(context, item); + } catch (Exception ex) { + log.error("An error occurs checking if the given item is a correction item.", ex); + return false; + } } -} +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index 8f155b63307d..c1402499c444 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -51,6 +51,7 @@ import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.core.I18nUtil; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; @@ -91,6 +92,7 
@@ public class SyndicationFeed { // default DC fields for entry protected String defaultTitleField = "dc.title"; + protected String defaultDescriptionField = "dc.description"; protected String defaultAuthorField = "dc.contributor.author"; protected String defaultDateField = "dc.date.issued"; private static final String[] defaultDescriptionFields = @@ -196,15 +198,15 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec // dso is null for the whole site, or a search without scope if (dso == null) { defaultTitle = configurationService.getProperty("dspace.name"); - feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION)); + defaultDescriptionField = localize(labels, MSG_FEED_DESCRIPTION); objectURL = resolveURL(request, null); } else { Bitstream logo = null; if (dso instanceof IndexableCollection) { Collection col = ((IndexableCollection) dso).getIndexedObject(); defaultTitle = col.getName(); - feed.setDescription(collectionService.getMetadataFirstValue(col, - CollectionService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = collectionService.getMetadataFirstValue(col, + CollectionService.MD_SHORT_DESCRIPTION, Item.ANY); logo = col.getLogo(); String cols = configurationService.getProperty("webui.feed.podcast.collections"); if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) { @@ -214,8 +216,8 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } else if (dso instanceof IndexableCommunity) { Community comm = ((IndexableCommunity) dso).getIndexedObject(); defaultTitle = comm.getName(); - feed.setDescription(communityService.getMetadataFirstValue(comm, - CommunityService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = communityService.getMetadataFirstValue(comm, + CommunityService.MD_SHORT_DESCRIPTION, Item.ANY); logo = comm.getLogo(); String comms = configurationService.getProperty("webui.feed.podcast.communities"); if (comms != null && comms.length() > 1 
&& comms.contains(comm.getHandle())) { @@ -230,6 +232,12 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } feed.setTitle(labels.containsKey(MSG_FEED_TITLE) ? localize(labels, MSG_FEED_TITLE) : defaultTitle); + + if (defaultDescriptionField == null || defaultDescriptionField == "") { + defaultDescriptionField = I18nUtil.getMessage("org.dspace.app.util.SyndicationFeed.no-description"); + } + + feed.setDescription(defaultDescriptionField); feed.setLink(objectURL); feed.setPublishedDate(new Date()); feed.setUri(objectURL); diff --git a/dspace-api/src/main/java/org/dspace/app/util/TypeBindUtils.java b/dspace-api/src/main/java/org/dspace/app/util/TypeBindUtils.java new file mode 100644 index 000000000000..97104bbb63fe --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/util/TypeBindUtils.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.InProgressSubmission; +import org.dspace.content.MetadataValue; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Utility methods for the type bind functionality. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + * + */ +public class TypeBindUtils { + + private static final ConfigurationService configurationService = DSpaceServicesFactory + .getInstance().getConfigurationService(); + private static final ItemService itemService = ContentServiceFactory + .getInstance().getItemService(); + private static final MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory + .getInstance().getMetadataAuthorityService(); + + private TypeBindUtils() {} + + /** + * This method gets the field used for type-bind. + * @return the field used for type-bind. + */ + public static String getTypeBindField() { + return configurationService.getProperty("submit.type-bind.field", "dc.type"); + } + + /** + * This method gets the value of the type-bind field from the current item. + * @return the value of the type-bind field from the current item. + */ + public static String getTypeBindValue(InProgressSubmission obj) { + List documentType = itemService.getMetadataByMetadataString( + obj.getItem(), getTypeBindField()); + + // check empty type-bind field + if (documentType == null || documentType.isEmpty() + || StringUtils.isBlank(documentType.get(0).getValue())) { + return null; + } + + MetadataValue typeBindValue = documentType.get(0); + + boolean isAuthorityAllowed = metadataAuthorityService.isAuthorityAllowed( + getTypeBindField().replace(".","_"), Constants.ITEM, obj.getCollection()); + if (isAuthorityAllowed && typeBindValue.getAuthority() != null) { + return typeBindValue.getAuthority(); + } + + return typeBindValue.getValue(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java index 274779e92877..500ee04a979b 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java +++ 
b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java @@ -153,6 +153,22 @@ public boolean allowSetPassword(Context context, public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException; + /** + * Returns true if the special groups returned by + * {@link org.dspace.authenticate.AuthenticationMethod#getSpecialGroups(Context, HttpServletRequest)} + * should be implicitly be added to the groups related to the current user. By + * default this is true if the authentication method is the actual + * authentication mechanism used by the user. + * @param context A valid DSpace context. + * @param request The request that started this operation, or null if not + * applicable. + * @return true is the special groups must be considered, false + * otherwise + */ + public default boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return getName().equals(context.getAuthenticationMethod()); + } + /** * Authenticate the given or implicit credentials. 
* This is the heart of the authentication method: test the diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java index a9449b87d4e3..1d67da37ecb3 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java @@ -179,10 +179,15 @@ public List getSpecialGroups(Context context, int totalLen = 0; for (AuthenticationMethod method : getAuthenticationMethodStack()) { - List gl = method.getSpecialGroups(context, request); - if (gl.size() > 0) { - result.addAll(gl); - totalLen += gl.size(); + + if (method.areSpecialGroupsApplicable(context, request)) { + + List gl = method.getSpecialGroups(context, request); + if (gl.size() > 0) { + result.addAll(gl); + totalLen += gl.size(); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java index 9c37fcee4755..0c2be211a532 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java @@ -52,11 +52,6 @@ public class IPAuthentication implements AuthenticationMethod { */ private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class); - /** - * Whether to look for x-forwarded headers for logging IP addresses - */ - protected static Boolean useProxies; - /** * All the IP matchers */ @@ -250,13 +245,18 @@ public List getSpecialGroups(Context context, HttpServletRequest request) log.debug(LogHelper.getHeader(context, "authenticated", "special_groups=" + gsb.toString() - + " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")" + + " (by IP=" + addr + ")" )); } return groups; } + @Override + public boolean areSpecialGroupsApplicable(Context context, 
HttpServletRequest request) { + return true; + } + @Override public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index f3c6022e02c2..585eaf9cd8b1 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -11,9 +11,11 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Hashtable; +import java.util.Iterator; import java.util.List; import javax.naming.NamingEnumeration; import javax.naming.NamingException; @@ -64,6 +66,7 @@ * @author Reuben Pasquini * @author Samuel Ottenhoff * @author Ivan Masár + * @author Michael Plate */ public class LDAPAuthentication implements AuthenticationMethod { @@ -391,7 +394,7 @@ private static class SpeakerToLDAP { protected String ldapGivenName = null; protected String ldapSurname = null; protected String ldapPhone = null; - protected String ldapGroup = null; + protected ArrayList ldapGroup = null; /** * LDAP settings @@ -406,9 +409,9 @@ private static class SpeakerToLDAP { final String ldap_surname_field; final String ldap_phone_field; final String ldap_group_field; - final boolean useTLS; + SpeakerToLDAP(Logger thelog) { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -491,6 +494,8 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con try { SearchControls ctrls = new SearchControls(); ctrls.setSearchScope(ldap_search_scope_value); + // Fetch both user attributes '*' (eg. uid, cn) and operational attributes '+' (eg. 
memberOf) + ctrls.setReturningAttributes(new String[] {"*", "+"}); String searchName; if (useTLS) { @@ -547,7 +552,11 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con if (attlist[4] != null) { att = atts.get(attlist[4]); if (att != null) { - ldapGroup = (String) att.get(); + // loop through all groups returned by LDAP + ldapGroup = new ArrayList(); + for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) { + ldapGroup.add((String) val.next()); + } } } @@ -693,15 +702,26 @@ public String getName() { /* * Add authenticated users to the group defined in dspace.cfg by * the authentication-ldap.login.groupmap.* key. + * + * @param dn + * The string containing distinguished name of the user + * + * @param group + * List of strings with LDAP dn of groups + * + * @param context + * DSpace context */ - private void assignGroups(String dn, String group, Context context) { + private void assignGroups(String dn, ArrayList group, Context context) { if (StringUtils.isNotBlank(dn)) { System.out.println("dn:" + dn); - int i = 1; - String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); - + int groupmapIndex = 1; + String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." 
+ groupmapIndex); boolean cmp; + + // groupmap contains the mapping of LDAP groups to DSpace groups + // outer loop with the DSpace groups while (groupMap != null) { String t[] = groupMap.split(":"); String ldapSearchString = t[0]; @@ -709,40 +729,73 @@ private void assignGroups(String dn, String group, Context context) { if (group == null) { cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); + + if (cmp) { + assignGroup(context, groupmapIndex, dspaceGroupName); + } } else { - cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString); - } + // list of strings with dn from LDAP groups + // inner loop + Iterator groupIterator = group.iterator(); + while (groupIterator.hasNext()) { - if (cmp) { - // assign user to this group - try { - Group ldapGroup = groupService.findByName(context, dspaceGroupName); - if (ldapGroup != null) { - groupService.addMember(context, ldapGroup, context.getCurrentUser()); - groupService.update(context, ldapGroup); + // save the current entry from iterator for further use + String currentGroup = groupIterator.next(); + + // very much the old code from DSpace <= 7.5 + if (currentGroup == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); } else { - // The group does not exist - log.warn(LogHelper.getHeader(context, - "ldap_assignGroupsBasedOnLdapDn", - "Group defined in authentication-ldap.login.groupmap." + i - + " does not exist :: " + dspaceGroupName)); + cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + } + + if (cmp) { + assignGroup(context, groupmapIndex, dspaceGroupName); } - } catch (AuthorizeException ae) { - log.debug(LogHelper.getHeader(context, - "assignGroupsBasedOnLdapDn could not authorize addition to " + - "group", - dspaceGroupName)); - } catch (SQLException e) { - log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", - dspaceGroupName)); } } - groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." 
+ ++i); + groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++groupmapIndex); } } } + /** + * Add the current authenticated user to the specified group + * + * @param context + * DSpace context + * + * @param groupmapIndex + * authentication-ldap.login.groupmap.* key index defined in dspace.cfg + * + * @param dspaceGroupName + * The DSpace group to add the user to + */ + private void assignGroup(Context context, int groupmapIndex, String dspaceGroupName) { + try { + Group ldapGroup = groupService.findByName(context, dspaceGroupName); + if (ldapGroup != null) { + groupService.addMember(context, ldapGroup, context.getCurrentUser()); + groupService.update(context, ldapGroup); + } else { + // The group does not exist + log.warn(LogHelper.getHeader(context, + "ldap_assignGroupsBasedOnLdapDn", + "Group defined in authentication-ldap.login.groupmap." + groupmapIndex + + " does not exist :: " + dspaceGroupName)); + } + } catch (AuthorizeException ae) { + log.debug(LogHelper.getHeader(context, + "assignGroupsBasedOnLdapDn could not authorize addition to " + + "group", + dspaceGroupName)); + } catch (SQLException e) { + log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", + dspaceGroupName)); + } + } + @Override public boolean isUsed(final Context context, final HttpServletRequest request) { if (request != null && diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java index f77d7e57119a..88797e9b1a79 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java @@ -27,7 +27,10 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; import 
org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.RegistrationDataService; import org.dspace.orcid.OrcidToken; import org.dspace.orcid.client.OrcidClient; import org.dspace.orcid.client.OrcidConfiguration; @@ -47,11 +50,15 @@ * ORCID authentication for DSpace. * * @author Luca Giamminonni (luca.giamminonni at 4science.it) - * */ public class OrcidAuthenticationBean implements AuthenticationMethod { + + public static final String ORCID_DEFAULT_FIRSTNAME = "Unnamed"; + public static final String ORCID_DEFAULT_LASTNAME = ORCID_DEFAULT_FIRSTNAME; public static final String ORCID_AUTH_ATTRIBUTE = "orcid-authentication"; + public static final String ORCID_REGISTRATION_TOKEN = "orcid-registration-token"; + public static final String ORCID_DEFAULT_REGISTRATION_URL = "/external-login/{0}"; private final static Logger LOGGER = LoggerFactory.getLogger(OrcidAuthenticationBean.class); @@ -78,6 +85,9 @@ public class OrcidAuthenticationBean implements AuthenticationMethod { @Autowired private OrcidTokenService orcidTokenService; + @Autowired + private RegistrationDataService registrationDataService; + @Override public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) throws SQLException { @@ -184,7 +194,7 @@ private int authenticateWithOrcid(Context context, String code, HttpServletReque return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS; } - return canSelfRegister() ? registerNewEPerson(context, person, token) : NO_SUCH_USER; + return canSelfRegister() ? 
createRegistrationData(context, request, person, token) : NO_SUCH_USER; } @@ -212,48 +222,59 @@ private ResearcherProfile findProfile(Context context, EPerson ePerson) throws S } } - private int registerNewEPerson(Context context, Person person, OrcidTokenResponseDTO token) throws SQLException { + private int createRegistrationData( + Context context, HttpServletRequest request, Person person, OrcidTokenResponseDTO token + ) throws SQLException { try { context.turnOffAuthorisationSystem(); - String email = getEmail(person) - .orElseThrow(() -> new IllegalStateException("The email is configured private on orcid")); - - String orcid = token.getOrcid(); - - EPerson eperson = ePersonService.create(context); + RegistrationData registrationData = + this.registrationDataService.create(context, token.getOrcid(), RegistrationTypeEnum.ORCID); - eperson.setNetid(orcid); + registrationData.setEmail(getEmail(person).orElse(null)); + setOrcidMetadataOnRegistration(context, registrationData, person, token); - eperson.setEmail(email); + registrationDataService.update(context, registrationData); - Optional firstName = getFirstName(person); - if (firstName.isPresent()) { - eperson.setFirstName(context, firstName.get()); - } - - Optional lastName = getLastName(person); - if (lastName.isPresent()) { - eperson.setLastName(context, lastName.get()); - } - eperson.setCanLogIn(true); - eperson.setSelfRegistered(true); - - setOrcidMetadataOnEPerson(context, eperson, token); - - ePersonService.update(context, eperson); - context.setCurrentUser(eperson); + request.setAttribute(ORCID_REGISTRATION_TOKEN, registrationData.getToken()); + context.commit(); context.dispatchEvents(); - return SUCCESS; - } catch (Exception ex) { LOGGER.error("An error occurs registering a new EPerson from ORCID", ex); context.rollback(); - return NO_SUCH_USER; } finally { context.restoreAuthSystemState(); + return NO_SUCH_USER; + } + } + + private void setOrcidMetadataOnRegistration( + Context context, 
RegistrationData registration, Person person, OrcidTokenResponseDTO token + ) throws SQLException, AuthorizeException { + String orcid = token.getOrcid(); + + setRegistrationMetadata(context, registration, "eperson.firstname", getFirstName(person)); + setRegistrationMetadata(context, registration, "eperson.lastname", getLastName(person)); + registrationDataService.setRegistrationMetadataValue(context, registration, "eperson", "orcid", null, orcid); + + for (String scope : token.getScopeAsArray()) { + registrationDataService.addMetadata(context, registration, "eperson", "orcid", "scope", scope); + } + } + + private void setRegistrationMetadata( + Context context, RegistrationData registration, String metadataString, String value) { + String[] split = metadataString.split("\\."); + String qualifier = split.length > 2 ? split[2] : null; + try { + registrationDataService.setRegistrationMetadataValue( + context, registration, split[0], split[1], qualifier, value + ); + } catch (SQLException | AuthorizeException ex) { + LOGGER.error("An error occurs setting metadata", ex); + throw new RuntimeException(ex); } } @@ -296,16 +317,20 @@ private Optional getEmail(Person person) { return Optional.ofNullable(emails.get(0).getEmail()); } - private Optional getFirstName(Person person) { + private String getFirstName(Person person) { return Optional.ofNullable(person.getName()) - .map(name -> name.getGivenNames()) - .map(givenNames -> givenNames.getContent()); + .map(name -> name.getGivenNames()) + .map(givenNames -> givenNames.getContent()) + .filter(StringUtils::isNotBlank) + .orElse(ORCID_DEFAULT_FIRSTNAME); } - private Optional getLastName(Person person) { + private String getLastName(Person person) { return Optional.ofNullable(person.getName()) - .map(name -> name.getFamilyName()) - .map(givenNames -> givenNames.getContent()); + .map(name -> name.getFamilyName()) + .map(givenNames -> givenNames.getContent()) + .filter(StringUtils::isNotBlank) + .orElse(ORCID_DEFAULT_LASTNAME); 
} private boolean canSelfRegister() { diff --git a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java index 6d1ca862d307..0bf0f9bcbc95 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java @@ -11,6 +11,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Optional; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -23,6 +24,7 @@ import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; /** @@ -52,12 +54,14 @@ public class PasswordAuthentication */ private static final Logger log = LogManager.getLogger(); + private static final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final String PASSWORD_AUTHENTICATED = "password.authenticated"; private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - /** * Look to see if this email address is allowed to register. *

      @@ -76,8 +80,7 @@ public boolean canSelfRegister(Context context, String email) throws SQLException { // Is there anything set in domain.valid? - String[] domains = DSpaceServicesFactory.getInstance().getConfigurationService() - .getArrayProperty("authentication-password.domain.valid"); + String[] domains = configurationService.getArrayProperty("authentication-password.domain.valid"); if ((domains == null) || (domains.length == 0)) { // No conditions set, so must be able to self register return true; @@ -146,8 +149,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) && StringUtils.isNotBlank( EPersonServiceFactory.getInstance().getEPersonService().getPasswordHash(context.getCurrentUser()) .toString())) { - String groupName = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("authentication-password.login.specialgroup"); + String groupName = configurationService.getProperty("authentication-password.login.specialgroup"); if ((groupName != null) && !groupName.trim().isEmpty()) { Group specialGroup = EPersonServiceFactory.getInstance().getGroupService() .findByName(context, groupName); @@ -169,6 +171,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) return Collections.EMPTY_LIST; } + /** * Check credentials: username must match the email address of an * EPerson record, and that EPerson must be allowed to login. 
@@ -275,4 +278,21 @@ public boolean canChangePassword(Context context, EPerson ePerson, String curren } return ePersonService.checkPassword(context, ePerson, currentPassword); } + + @Override + public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return isPasswordAuthenticationMethodInContext(context, request) || + isPasswordAuthenticatedInRequest(context, request); + } + + private boolean isPasswordAuthenticatedInRequest(Context context, HttpServletRequest request) { + return (context == null || StringUtils.isBlank(context.getAuthenticationMethod())) && + request != null && Optional.ofNullable(request.getAttribute(PASSWORD_AUTHENTICATED)) + .map(Boolean.class::cast) + .orElse(false); + } + + private boolean isPasswordAuthenticationMethodInContext(Context context, HttpServletRequest request) { + return AuthenticationMethod.super.areSpecialGroupsApplicable(context, request); + } } diff --git a/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java b/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java index eec4412c0c98..a83cc8692e31 100644 --- a/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java +++ b/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java @@ -193,7 +193,7 @@ private boolean isMetadataSkippable(MetadataValue metadata) { return true; } - if (isBlank(authority) && isMetadataWithEmptyAuthoritySkippable(metadata)) { + if (isBlank(authority) && (isBlank(metadata.getValue()) || isMetadataWithEmptyAuthoritySkippable(metadata))) { return true; } @@ -255,7 +255,7 @@ private String getFieldKey(MetadataValue metadata) { private Item buildRelatedItem(Context context, Item item, Collection collection, MetadataValue metadata, String entityType, String crisSourceId) throws Exception { - WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, useOfTemplate(metadata)); Item 
relatedItem = workspaceItem.getItem(); itemService.addMetadata(context, relatedItem, CRIS.getName(), "sourceId", null, null, crisSourceId); if (!hasEntityType(relatedItem, entityType)) { @@ -299,6 +299,17 @@ private boolean isSubmissionEnabled(MetadataValue value) { } } + private boolean useOfTemplate(MetadataValue value) { + + String useOfTemplateByMetadata = "cris.import.submission.enabled.entity." + + getFieldKey(value) + ".use-template"; + if (configurationService.hasProperty(useOfTemplateByMetadata)) { + return configurationService.getBooleanProperty(useOfTemplateByMetadata); + } + + return configurationService.getBooleanProperty("cris.import.submission.enabled.entity.use-template"); + } + private void fillRelatedItem(Context context, MetadataValue metadata, Item relatedItem, boolean alreadyPresent) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java b/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java index ef218c76fb34..7a7d10e63499 100644 --- a/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java +++ b/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java @@ -7,7 +7,6 @@ */ package org.dspace.authority.filler; -import static org.apache.commons.collections.CollectionUtils.isEmpty; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.removeStart; import static org.apache.commons.lang3.StringUtils.startsWith; @@ -110,7 +109,11 @@ private void enrichItemWithExternalData(Context context, Item item, ExternalData } private boolean notAlreadyPresent(Item item, MetadataValueDTO value) { - return isEmpty(itemService.getMetadata(item, value.getSchema(), value.getElement(), value.getQualifier(), ANY)); + List metadataValues = itemService.getMetadata(item, value.getSchema(), + value.getElement(), value.getQualifier(), ANY); + + 
return metadataValues.stream().noneMatch(metadataValue -> + metadataValue.getValue().equals(value.getValue())); } private boolean isTitleNotSet(Item item) { diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 014de4671c8b..b5b53963eab6 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -47,6 +47,7 @@ import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; @@ -466,7 +467,7 @@ public boolean isAdmin(Context c, EPerson e) throws SQLException { if (e == null) { return false; // anonymous users can't be admins.... } else { - return groupService.isMember(c, e, Group.ADMIN); + return groupService.isMember(c, e, c.getAdminGroup()); } } @@ -676,60 +677,6 @@ public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group g } } - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context The relevant DSpace Context. 
- * @param embargoDate embargo end date - * @param reason embargo reason - * @param dso DSpace object - * @param owningCollection collection to get group policies from - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - @Override - public void generateAutomaticPolicies(Context context, Date embargoDate, - String reason, DSpaceObject dso, Collection owningCollection) - throws SQLException, AuthorizeException { - - if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - - List authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); - - removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); - - // look for anonymous - boolean isAnonymousInPlace = false; - for (Group g : authorizedGroups) { - if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) { - isAnonymousInPlace = true; - } - } - if (!isAnonymousInPlace) { - // add policies for all the groups - for (Group g : authorizedGroups) { - ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ, - reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - - } else { - // add policy just for anonymous - ResourcePolicy rp = createOrModifyPolicy(null, context, null, - groupService.findByName(context, Group.ANONYMOUS), null, - embargoDate, Constants.READ, reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - } - } - @Override public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException { @@ -831,6 +778,19 @@ public boolean isCollectionAdmin(Context context) throws SQLException { return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE); } + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. 
+ * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. + */ + @Override + public boolean isItemAdmin(Context context) throws SQLException { + return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE); + } + /** * Checks that the context's current user is a community or collection admin in the site. * diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index 954bb9699038..c781400bae45 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -41,9 +41,16 @@ @Entity @Table(name = "resourcepolicy") public class ResourcePolicy implements ReloadableEntity { + /** This policy was set on submission, to give the submitter access. */ public static String TYPE_SUBMISSION = "TYPE_SUBMISSION"; + + /** This policy was set to allow access by a workflow group. */ public static String TYPE_WORKFLOW = "TYPE_WORKFLOW"; + + /** This policy was explicitly set on this object. */ public static String TYPE_CUSTOM = "TYPE_CUSTOM"; + + /** This policy was copied from the containing object's default policies. 
*/ public static String TYPE_INHERITED = "TYPE_INHERITED"; @Id @@ -93,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity { private String rptype; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "rpdescription") private String rpdescription; diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java index d707bf200b4e..87bf459bcbeb 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java @@ -40,6 +40,9 @@ public List findByDsoAndType(Context context, DSpaceObject dSpac public List findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException; + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action) + throws SQLException; + public List findByDSoAndActionAndType(Context c, DSpaceObject o, int actionId, String type) throws SQLException; @@ -64,9 +67,6 @@ public List findByEPersonGroupTypeIdAction(Context context, EPer public void deleteByDsoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException; - public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action) - throws SQLException; - public void deleteByDsoAndType(Context context, DSpaceObject dSpaceObject, String type) throws SQLException; public void deleteByGroup(Context context, Group group) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java index ee79933361d0..3c002459ff18 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java @@ -125,6 +125,19 @@ public List findByDSoAndActionAndType(Context context, DSpaceObj return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1); } + @Override + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId) + throws SQLException { + String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId " + + "AND rptype = :rptype AND actionId= :actionId"; + Query query = createQuery(context, queryString); + query.setParameter("dsoId", dso.getID()); + query.setParameter("rptype", type); + query.setParameter("actionId", actionId); + query.executeUpdate(); + + } + @Override public List findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException { @@ -203,19 +216,6 @@ public void deleteByDsoAndType(Context context, DSpaceObject dso, String type) t query.executeUpdate(); } - @Override - public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId) - throws SQLException { - String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId " - + "AND rptype = :rptype AND actionId= :actionId"; - Query query = createQuery(context, queryString); - query.setParameter("dsoId", dso.getID()); - query.setParameter("rptype", type); - query.setParameter("actionId", actionId); - query.executeUpdate(); - - } - @Override public void deleteByGroup(Context context, Group group) throws SQLException { String queryString = "delete from ResourcePolicy where epersonGroup= :epersonGroup"; diff --git a/dspace-api/src/main/java/org/dspace/authorize/package-info.java b/dspace-api/src/main/java/org/dspace/authorize/package-info.java new file mode 100644 index 000000000000..f36c39cfe351 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/package-info.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the 
license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +/** + * Represents permissions for access to DSpace content. + * + *

      Philosophy

      + * DSpace's authorization system follows the classical "police state" + * philosophy of security - the user can do nothing, unless it is + * specifically allowed. Those permissions are spelled out with + * {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table + * in the database. + * + *

      Policies are attached to Content

      + * Resource Policies get assigned to all of the content objects in + * DSpace - collections, communities, items, bundles, and bitstreams. + * (Currently they are not attached to non-content objects such as + * {@code EPerson} or {@code Group}. But they could be, hence the name + * {@code ResourcePolicy} instead of {@code ContentPolicy}.) + * + *

      Policies are tuples

      + * Authorization is based on evaluating the tuple of (object, action, actor), + * such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson} + * "John Smith" can read an item. {@code ResourcePolicy} objects are pretty + * simple, describing a single instance of (object, action, actor). If + * multiple actors are desired, such as groups 10, 11, and 12 are allowed to + * READ Item 13, you simply create a {@code ResourcePolicy} for each group. + * + *

      Built-in groups

      + * The install process should create two built-in groups - {@code Anonymous} + * for anonymous/public access, and {@code Administrators} for administrators. + * Group {@code Anonymous} allows anyone access, even if not authenticated. + * Group {@code Administrators}' members have super-user rights, + * and are allowed to do any action to any object. + * + *

      Policy types + * Policies have a "type" used to distinguish policies which are applied for + * specific purposes. + *
      + *
      CUSTOM
      + *
      These are created and assigned explicitly by users.
      + *
      INHERITED
      + *
      These are copied from a containing object's default policies.
      + *
      SUBMISSION
      + *
      These are applied during submission to give the submitter access while + * composing a submission.
      + *
      WORKFLOW
      + *
      These are automatically applied during workflow, to give curators + * access to submissions in their curation queues. They usually have an + * automatically-created workflow group as the actor.
      + * + *

      Start and End dates

      + * A policy may have a start date and/or an end date. The policy is + * considered not valid before the start date or after the end date. No date + * means do not apply the related test. For example, embargo until a given + * date can be expressed by a READ policy with a given start date, and a + * limited-time offer by a READ policy with a given end date. + * + * @author dstuve + * @author mwood + */ +package org.dspace.authorize; diff --git a/dspace-api/src/main/java/org/dspace/authorize/package.html b/dspace-api/src/main/java/org/dspace/authorize/package.html deleted file mode 100644 index 66ce0f824773..000000000000 --- a/dspace-api/src/main/java/org/dspace/authorize/package.html +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - -

      Handles permissions for DSpace content. -

      - -

      Philosophy
      -DSpace's authorization system follows the classical "police state" -philosophy of security - the user can do nothing, unless it is -specifically allowed. Those permissions are spelled out with -ResourcePolicy objects, stored in the resourcepolicy table in the -database. -

      - -

      Policies are attached to Content

      -

      Policies are attached to Content
      -Resource Policies get assigned to all of the content objects in -DSpace - collections, communities, items, bundles, and bitstreams. -(Currently they are not attached to non-content objects such as EPerson -or Group. But they could be, hence the name ResourcePolicy instead of -ContentPolicy.) -

      - -

      Policies are tuples

      -Authorization is based on evaluating the tuple of (object, action, who), -such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith" -can read an item. ResourcePolicy objects are pretty simple, describing a single instance of -(object, action, who). If multiple who's are desired, such as Groups 10, 11, and -12 are allowed to READ Item 13, you simply create a ResourcePolicy for each -group. -

      - -

      Special Groups

      -The install process should create two special groups - group 0, for -anonymous/public access, and group 1 for administrators. -Group 0 (public/anonymous) allows anyone access, even if they are not -authenticated. Group 1's (admin) members have super-user rights, and -are allowed to do any action to any object. -

      - -

      Unused ResourcePolicy attributes

      -ResourcePolicies have a few attributes that are currently unused, -but are included with the intent that they will be used someday. -One is start and end dates, for when policies will be active, so that -permissions for content can change over time. The other is the EPerson - -policies could apply to only a single EPerson, but for ease of -administration currently a Group is the recommended unit to use to -describe 'who'. -

      - - - diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 43ae51544c9b..3db676d88b2b 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -489,24 +489,6 @@ public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject o, Grou public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException; - - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context current context - * @param embargoDate date - * @param reason reason - * @param dso DSpaceObject - * @param owningCollection collection - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso, - Collection owningCollection) throws SQLException, AuthorizeException; - public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException; @@ -551,6 +533,15 @@ void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int */ boolean isCollectionAdmin(Context context) throws SQLException; + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. 
+ * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + */ + boolean isItemAdmin(Context context) throws SQLException; + /** * Checks that the context's current user is a community or collection admin in the site. * @@ -646,7 +637,7 @@ long countAdminAuthorizedCollection(Context context, String query) /** * Replace all the policies in the target object with exactly the same policies that exist in the source object - * + * * @param context DSpace Context * @param source source of policies * @param dest destination of inherited policies diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java index beb3c34662df..662b14b18b2e 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java @@ -56,12 +56,19 @@ public List find(Context c, EPerson e, List groups, int a throws SQLException; /** - * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID. - * This method can be used to detect duplicate ResourcePolicies. + * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring + * IDs with a specific PolicyID. This method can be used to detect duplicate + * ResourcePolicies. * - * @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies. - * @return List of resource policies for the same DSpaceObject, group and action but other policyID. - * @throws SQLException + * @param context current DSpace session. + * @param dso find policies for this object. + * @param group find policies referring to this group. + * @param action find policies for this action. 
+ * @param notPolicyID ResourcePolicies with this ID will be ignored while + * looking out for equal ResourcePolicies. + * @return List of resource policies for the same DSpaceObject, group and + * action but other policyID. + * @throws SQLException passed through. */ public List findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) @@ -71,6 +78,16 @@ public List findByTypeGroupActionExceptId(Context context, DSpac public boolean isDateValid(ResourcePolicy resourcePolicy); + /** + * Create and persist a copy of a given ResourcePolicy, with an empty + * dSpaceObject field. + * + * @param context current DSpace session. + * @param resourcePolicy the policy to be copied. + * @return the copy. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + */ public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException; public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException; @@ -123,6 +140,7 @@ public List findExceptRpType(Context c, DSpaceObject o, int acti * @param ePerson ePerson whose policies want to find * @param offset the position of the first result to return * @param limit paging limit + * @return some of the policies referring to {@code ePerson}. * @throws SQLException if database error */ public List findByEPerson(Context context, EPerson ePerson, int offset, int limit) diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 14e439d5908c..5d5f2ccb755e 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -22,11 +22,13 @@ * This class holds all the information about a specifically configured * BrowseIndex. 
It is responsible for parsing the configuration, understanding * about what sort options are available, and what the names of the database - * tables that hold all the information are actually called. + * tables that hold all the information are actually called. Hierarchical browse + * indexes also contain information about the vocabulary they're using, see: + * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} * * @author Richard Jones */ -public final class BrowseIndex { +public class BrowseIndex { /** the configuration number, as specified in the config */ /** * used for single metadata browse tables for generating the table name @@ -99,10 +101,10 @@ private BrowseIndex() { /** * Constructor for creating generic / internal index objects - * + * * @param baseName The base of the table name */ - private BrowseIndex(String baseName) { + protected BrowseIndex(String baseName) { this(baseName, "item"); } @@ -735,7 +737,7 @@ public static BrowseIndex getBrowseIndex(SortOption so) throws BrowseException { /** * Get the internally defined browse index for archived items. 
- * + * * @param displayType * * @return browse index diff --git a/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java b/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java index 1ce2e558866d..ec4cb199ea1d 100644 --- a/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java +++ b/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java @@ -108,7 +108,7 @@ public String findLinkType(String metadata) { } else { // Exact match, if the key field has no .* wildcard if (links.containsKey(metadata)) { - return links.get(key); + return links.get(metadata); } } } diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java index c9c140fb0b5b..20c43fc37298 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java @@ -18,6 +18,7 @@ import org.dspace.core.Context; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.web.ContextUtil; /** * This class provides a standard interface to all item counting @@ -49,9 +50,20 @@ public class ItemCounter { */ private Context context; + /** + * This field is used to hold singular instance of a class. + * Singleton pattern is used but this class should be + * refactored to modern DSpace approach (injectible service). 
+ */ + + private static ItemCounter instance; + protected ItemService itemService; protected ConfigurationService configurationService; + private boolean showStrengths; + private boolean useCache; + /** * Construct a new item counter which will use the given DSpace Context * @@ -63,21 +75,42 @@ public ItemCounter(Context context) throws ItemCountException { this.dao = ItemCountDAOFactory.getInstance(this.context); this.itemService = ContentServiceFactory.getInstance().getItemService(); this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", false); + this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true); } /** - * Get the count of the items in the given container. If the configuration - * value webui.strengths.cache is equal to 'true' this will return the - * cached value if it exists. If it is equal to 'false' it will count - * the number of items in the container in real time. + * Get the singular instance of a class. + * It creates a new instance at the first usage of this method. + * + * @return instance af a class + * @throws ItemCountException when error occurs + */ + public static ItemCounter getInstance() throws ItemCountException { + if (instance == null) { + instance = new ItemCounter(ContextUtil.obtainCurrentRequestContext()); + } + return instance; + } + + /** + * Get the count of the items in the given container. If the configuration + * value webui.strengths.show is equal to 'true' this method will return all + * archived items. If the configuration value webui.strengths.show is equal to + * 'false' this method will return -1. + * If the configuration value webui.strengths.cache + * is equal to 'true' this will return the cached value if it exists. + * If it is equal to 'false' it will count the number of items + * in the container in real time. 
* * @param dso DSpaceObject * @return count * @throws ItemCountException when error occurs */ public int getCount(DSpaceObject dso) throws ItemCountException { - boolean useCache = configurationService.getBooleanProperty( - "webui.strengths.cache", true); + if (!showStrengths) { + return -1; + } if (useCache) { return dao.getCount(dso); diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index 0194be59f3a7..3676133a89f9 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -264,7 +264,7 @@ private void addLocationScopeFilter(DiscoverQuery query) { } private void addDefaultFilterQueries(DiscoverQuery query) { - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container); discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); } diff --git a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java index 6b16d51bfe1e..a12ac3b98a2e 100644 --- a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java +++ b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java @@ -245,7 +245,7 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { info.setProcessStartDate(new Date()); try { - Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); + Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); if (MapUtils.isNotEmpty(checksumMap)) { info.setBitstreamFound(true); if (checksumMap.containsKey("checksum")) { @@ -255,10 +255,16 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { if 
(checksumMap.containsKey("checksum_algorithm")) { info.setChecksumAlgorithm(checksumMap.get("checksum_algorithm").toString()); } + + // compare new checksum to previous checksum + info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum())); + + } else { + info.setCurrentChecksum(""); + info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); + info.setToBeProcessed(false); } - // compare new checksum to previous checksum - info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum())); } catch (IOException e) { // bitstream located, but file missing from asset store info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); diff --git a/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java new file mode 100644 index 000000000000..afd74a588d17 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java @@ -0,0 +1,77 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.cli; + +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; + +/** + * Extended version of the DefaultParser. This parser skip/ignore unknown arguments. 
+ */ +public class DSpaceSkipUnknownArgumentsParser extends DefaultParser { + + + @Override + public CommandLine parse(Options options, String[] arguments) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments)); + } + + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties); + } + + /** + * Parse the arguments according to the specified options and properties. + * @param options the specified Options + * @param arguments the command line arguments + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. + */ + @Override + public CommandLine parse(Options options, String[] arguments, boolean stopAtNonOption) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), stopAtNonOption); + } + + /** + * Parse the arguments according to the specified options and properties. + * @param options the specified Options + * @param arguments the command line arguments + * @param properties command line option name-value pairs + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. 
+ */ + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties, boolean stopAtNonOption) + throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties, stopAtNonOption); + } + + + private String[] getOnlyKnownArguments(Options options, String[] arguments) { + List knownArguments = new ArrayList<>(); + for (String arg : arguments) { + if (options.hasOption(arg)) { + knownArguments.add(arg); + } + } + return knownArguments.toArray(new String[0]); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index f2a8680ee58d..0682082e03f8 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -7,12 +7,15 @@ */ package org.dspace.content; +import static org.apache.commons.lang.StringUtils.startsWith; + import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Spliterators; import java.util.UUID; import java.util.regex.Pattern; @@ -286,6 +289,11 @@ public void delete(Context context, Bitstream bitstream) throws SQLException, Au //Remove our bitstream from all our bundles final List bundles = bitstream.getBundles(); for (Bundle bundle : bundles) { + authorizeService.authorizeAction(context, bundle, Constants.REMOVE); + //We also need to remove the bitstream id when it's set as bundle's primary bitstream + if (bitstream.equals(bundle.getPrimaryBitstream())) { + bundle.unsetPrimaryBitstreamID(); + } bundle.removeBitstream(bitstream); } @@ -399,6 +407,13 @@ public Bitstream getBitstreamByName(Item item, String bundleName, String bitstre return null; } + @Override + public List getBitstreamByBundleName(Item item, String bundleName) throws 
SQLException { + return itemService.getBundles(item, bundleName).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .collect(Collectors.toList()); + } + @Override public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLException { List bundles = itemService.getBundles(item, bundleName); @@ -413,7 +428,7 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + Pattern pattern = getBitstreamNamePattern(bitstream); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { @@ -443,6 +458,13 @@ public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLEx return null; } + protected Pattern getBitstreamNamePattern(Bitstream bitstream) { + if (bitstream.getName() != null) { + return Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); + } + return Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + } + @Override public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException { if (bitstream.getBitstreamFormat() == null) { @@ -530,6 +552,10 @@ public List findByItemAndBundleAndMetadata(Context context, Item item } + public boolean exists(Context context, UUID id) throws SQLException { + return this.bitstreamDAO.exists(context, Bitstream.class, id); + } + private boolean isContainedInBundleNamed(Bitstream bitstream, String name) { if (StringUtils.isEmpty(name)) { @@ -606,4 +632,63 @@ private Stream streamOf(Iterator iterator) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, 0), false); } + @Override + public boolean isOriginalBitstream(DSpaceObject dso) throws SQLException { + + if (dso.getType() != Constants.BITSTREAM) { + return false; + } + + Bitstream bitstream = (Bitstream) dso; + + return 
bitstream.getBundles().stream() + .anyMatch(bundle -> "ORIGINAL".equals(bundle.getName())); + + } + + @Override + public void updateThumbnailResourcePolicies(Context context, Bitstream bitstream) throws SQLException { + getThumbnail(bitstream) + .ifPresent(thumbnail -> replacePolicies(context, bitstream, thumbnail)); + } + + private void replacePolicies(Context context, Bitstream bitstream, Bitstream thumbnail) { + try { + authorizeService.replaceAllPolicies(context, bitstream, thumbnail); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + } + + private Optional getThumbnail(Bitstream bitstream) throws SQLException { + return getItem(bitstream) + .flatMap(item -> getThumbnail(item, bitstream.getName())); + } + + private Optional getItem(Bitstream bitstream) throws SQLException { + return bitstream.getBundles().stream() + .flatMap(bundle -> bundle.getItems().stream()) + .findFirst(); + } + + private Optional getThumbnail(Item item, String name) { + List bundles = getThumbnailBundles(item); + if (CollectionUtils.isEmpty(bundles)) { + return Optional.empty(); + } + + return bundles.stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> startsWith(bitstream.getName(), name)) + .findFirst(); + } + + private List getThumbnailBundles(Item item) { + try { + return itemService.getBundles(item, "THUMBNAIL"); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/Bundle.java b/dspace-api/src/main/java/org/dspace/content/Bundle.java index 6c62c3dc9139..e5cbdb6ff244 100644 --- a/dspace-api/src/main/java/org/dspace/content/Bundle.java +++ b/dspace-api/src/main/java/org/dspace/content/Bundle.java @@ -126,7 +126,7 @@ public void setPrimaryBitstreamID(Bitstream bitstream) { * Unset the primary bitstream ID of the bundle */ public void unsetPrimaryBitstreamID() { - primaryBitstream = null; + setPrimaryBitstreamID(null); } /** diff --git 
a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 485f1d645130..e70af09bb610 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -8,6 +8,7 @@ package org.dspace.content; import static org.dspace.core.Constants.ADD; +import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.REMOVE; import static org.dspace.core.Constants.WRITE; @@ -34,6 +35,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.eperson.Group; import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; @@ -74,14 +76,14 @@ public Bundle find(Context context, UUID id) throws SQLException { if (bundle == null) { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "not_found,bundle_id=" + id)); + "not_found,bundle_id=" + id)); } return null; } else { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "bundle_id=" + id)); + "bundle_id=" + id)); } return bundle; @@ -106,7 +108,7 @@ public Bundle create(Context context, Item item, String name) throws SQLExceptio log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); // if we ever use the identifier service for bundles, we should // create the bundle before we create the Event and should add all @@ -132,12 +134,12 @@ public Bitstream getBitstreamByName(Bundle bundle, String name) { @Override public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.ADD); log.info(LogHelper.getHeader(context, "add_bitstream", "bundle_id=" - + 
bundle.getID() + ",bitstream_id=" + bitstream.getID())); + + bundle.getID() + ",bitstream_id=" + bitstream.getID())); // First check that the bitstream isn't already in the list List bitstreams = bundle.getBitstreams(); @@ -167,12 +169,45 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) context.addEvent(new Event(Event.ADD, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... authorizeService.inheritPolicies(context, bundle, bitstream); + // The next logic is a bit overly cautious but ensures that if there are any future start dates + // on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection + // just in case. In practice, the item install process would overwrite these anyway but it may satisfy + // some other bitstream creation methods and integration tests + boolean isEmbargoed = false; + for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) { + if (!resourcePolicyService.isDateValid(resourcePolicy)) { + isEmbargoed = true; + break; + } + } + if (owningItem != null && !isEmbargoed) { + // Resolve owning collection + Collection owningCollection = owningItem.getOwningCollection(); + if (owningCollection != null) { + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + authorizeService.getAuthorizedGroups(context, owningCollection, + Constants.DEFAULT_BITSTREAM_READ); + log.info(defaultBitstreamReadGroups.size()); + // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy + // inherited from the bundle with this policy. 
+ if (!defaultBitstreamReadGroups.isEmpty()) { + // Remove read policies from the bitstream + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) { + // Inherit this policy as READ, directly from the collection roles + authorizeService.addPolicy(context, bitstream, + Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED); + } + } + } + } bitstreamService.update(context, bitstream); } @@ -183,12 +218,12 @@ public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) authorizeService.authorizeAction(context, bundle, Constants.REMOVE); log.info(LogHelper.getHeader(context, "remove_bitstream", - "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); context.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); //Ensure that the last modified from the item is triggered ! Item owningItem = (Item) getParentObject(context, bundle); @@ -221,9 +256,9 @@ public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) @Override public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Collection collection) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List policies = authorizeService.getPoliciesActionFilter(context, collection, - Constants.DEFAULT_BITSTREAM_READ); + Constants.DEFAULT_BITSTREAM_READ); // change the action to just READ // just don't call update on the resourcepolicies!!! 
@@ -231,7 +266,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col if (!i.hasNext()) { throw new java.sql.SQLException("Collection " + collection.getID() - + " has no default bitstream READ policies"); + + " has no default bitstream READ policies"); } List newPolicies = new ArrayList(); @@ -246,7 +281,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col @Override public void replaceAllBitstreamPolicies(Context context, Bundle bundle, List newpolicies) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List bitstreams = bundle.getBitstreams(); if (CollectionUtils.isNotEmpty(bitstreams)) { for (Bitstream bs : bitstreams) { @@ -368,16 +403,16 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws if (bitstream == null) { //This should never occur but just in case log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); continue; } // If we have a Bitstream not in the current list, log a warning & exit immediately if (!currentBitstreams.contains(bitstream)) { log.warn(LogHelper.getHeader(context, - "Encountered a bitstream not in this bundle while changing bitstream " + - "order. Bitstream order will not be changed.", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Encountered a bitstream not in this bundle while changing bitstream " + + "order. 
Bitstream order will not be changed.", + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); return; } updatedBitstreams.add(bitstream); @@ -386,9 +421,9 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws // If our lists are different sizes, exit immediately if (updatedBitstreams.size() != currentBitstreams.size()) { log.warn(LogHelper.getHeader(context, - "Size of old list and new list do not match. Bitstream order will not be " + - "changed.", - "Bundle: " + bundle.getID())); + "Size of old list and new list do not match. Bitstream order will not be " + + "changed.", + "Bundle: " + bundle.getID())); return; } @@ -434,7 +469,7 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamDeletion()) { + .canCommunityAdminPerformBitstreamDeletion()) { adminObject = community; } break; @@ -442,10 +477,10 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) { adminObject = item; } else if (AuthorizeConfiguration - .canCollectionAdminPerformBitstreamCreation()) { + .canCollectionAdminPerformBitstreamCreation()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamCreation()) { + .canCommunityAdminPerformBitstreamCreation()) { adminObject = community; } break; @@ -477,7 +512,7 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz // Check authorisation //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE); log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); super.update(context, bundle); bundleDAO.save(context, bundle); @@ -485,10 +520,10 @@ public void update(Context context, 
Bundle bundle) throws SQLException, Authoriz if (bundle.isModified() || bundle.isMetadataModified()) { if (bundle.isMetadataModified()) { context.addEvent(new Event(Event.MODIFY_METADATA, bundle.getType(), bundle.getID(), bundle.getDetails(), - getIdentifiers(context, bundle))); + getIdentifiers(context, bundle))); } context.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, bundle.getID(), - null, getIdentifiers(context, bundle))); + null, getIdentifiers(context, bundle))); bundle.clearModified(); bundle.clearDetails(); } @@ -497,12 +532,12 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz @Override public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException { log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); authorizeService.authorizeAction(context, bundle, Constants.DELETE); context.addEvent(new Event(Event.DELETE, Constants.BUNDLE, bundle.getID(), - bundle.getName(), getIdentifiers(context, bundle))); + bundle.getName(), getIdentifiers(context, bundle))); // Remove bitstreams List bitstreams = bundle.getBitstreams(); @@ -544,4 +579,8 @@ public Bundle findByLegacyId(Context context, int id) throws SQLException { public int countTotal(Context context) throws SQLException { return bundleDAO.countRows(context); } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.bundleDAO.exists(context, Bundle.class, id); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/Collection.java b/dspace-api/src/main/java/org/dspace/content/Collection.java index 7dadde72c90a..dbe2d35efe1e 100644 --- a/dspace-api/src/main/java/org/dspace/content/Collection.java +++ b/dspace-api/src/main/java/org/dspace/content/Collection.java @@ -29,6 +29,7 @@ import javax.persistence.Transient; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import 
org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; @@ -341,4 +342,17 @@ private CollectionService getCollectionService() { return collectionService; } + /** + * return count of the collection items + * + * @return int + */ + public int countArchivedItems() { + try { + return collectionService.countArchivedItems(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 367c7a5d34b1..4b38b9b1c079 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -35,6 +35,8 @@ import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CollectionDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -1217,4 +1219,41 @@ public int countCollectionsAdministeredByEntityType(String query, String entityT discoverQuery, query, entityType).getTotalSearchResults(); } + @Override + @SuppressWarnings("rawtypes") + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException { + List collectionList = new ArrayList<>(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); + + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + List solrIndexableObjects = discoverResult.getIndexableObjects(); + + for 
(IndexableObject solrCollection : solrIndexableObjects) { + Collection c = ((IndexableCollection) solrCollection).getIndexedObject(); + collectionList.add(c); + } + return collectionList; + } + + /** + * Returns total collection archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ + @Override + public int countArchivedItems(Collection collection) throws ItemCountException { + return ItemCounter.getInstance().getCount(collection); + } + + @Override + public boolean exists(Context context, UUID id) throws SQLException { + return this.collectionDAO.exists(context, Collection.class, id); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/Community.java b/dspace-api/src/main/java/org/dspace/content/Community.java index fa99da33091a..dd6d978936df 100644 --- a/dspace-api/src/main/java/org/dspace/content/Community.java +++ b/dspace-api/src/main/java/org/dspace/content/Community.java @@ -25,6 +25,7 @@ import javax.persistence.Transient; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.dspace.browse.ItemCountException; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CommunityService; @@ -264,4 +265,16 @@ private CommunityService getCommunityService() { return communityService; } + /** + * return count of the community items + * + * @return int + */ + public int countArchivedItems() { + try { + return communityService.countArchivedItems(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index b4053a724f32..b74aa0aaa336 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java 
@@ -25,6 +25,8 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CommunityDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -82,7 +84,6 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl imp protected CommunityServiceImpl() { super(); - } @Override @@ -712,4 +713,22 @@ public Community findByLegacyId(Context context, int id) throws SQLException { public int countTotal(Context context) throws SQLException { return communityDAO.countRows(context); } + + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ + @Override + public int countArchivedItems(Community community) throws ItemCountException { + return ItemCounter.getInstance().getCount(community); + } + + @Override + public boolean exists(Context context, UUID id) throws SQLException { + return this.communityDAO.exists(context, Community.class, id); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java index 213bbcbaa0cc..df24075d548c 100644 --- a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java @@ -94,7 +94,7 @@ public Item installItem(Context c, InProgressSubmission is, // As this is a BRAND NEW item, as a final step we need to remove the // submitter item policies created during deposit and replace them with // the default policies from the collection. 
- itemService.inheritCollectionDefaultPolicies(c, item, collection); + itemService.inheritCollectionDefaultPolicies(c, item, collection, false); return item; } @@ -273,4 +273,28 @@ public String getBitstreamProvenanceMessage(Context context, Item myitem) return myMessage.toString(); } + + @Override + public String getSubmittedByProvenanceMessage(Context context, Item item) throws SQLException { + // get date + DCDate now = DCDate.getCurrent(); + + // Create provenance description + StringBuffer provmessage = new StringBuffer(); + + if (item.getSubmitter() != null) { + provmessage.append("Submitted by ").append(item.getSubmitter().getFullName()) + .append(" (").append(item.getSubmitter().getEmail()).append(") on ") + .append(now.toString()); + } else { + // else, null submitter + provmessage.append("Submitted by unknown (probably automated) on") + .append(now.toString()); + } + provmessage.append("\n"); + + // add sizes and checksums of bitstreams + provmessage.append(getBitstreamProvenanceMessage(context, item)); + return provmessage.toString(); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 3ad03377cb27..6b3ef003edca 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -79,7 +79,9 @@ import org.dspace.event.Event; import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; +import org.dspace.identifier.DOI; import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.service.DOIService; import org.dspace.identifier.service.IdentifierService; import org.dspace.layout.CrisLayoutBox; import org.dspace.layout.CrisLayoutField; @@ -146,6 +148,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected IdentifierService identifierService; 
@Autowired(required = true) + protected DOIService doiService; + @Autowired(required = true) protected VersioningService versioningService; @Autowired(required = true) protected HarvestedItemService harvestedItemService; @@ -298,9 +302,7 @@ private List getThumbnailFields(List crisLayoutT * @param context * @param item * @param bundle - * @param metadata * @param value - * @param requireOriginal * @throws SQLException * @return Bitstream */ @@ -962,6 +964,16 @@ protected void rawDelete(Context context, Item item) throws AuthorizeException, // Remove any Handle handleService.unbindHandle(context, item); + // Delete a DOI if linked to the item. + // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid, + // hibernate will throw a foreign constraint exception. + // Here we use the DOI service directly as it is able to manage DOIs even without any configured + // consumer or provider. + DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setDSpaceObject(null); + } + // remove version attached to the item removeVersion(context, item); @@ -1185,9 +1197,17 @@ public void removeGroupPolicies(Context context, Item item, Group group) throws @Override public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) - throws SQLException, AuthorizeException { - adjustItemPolicies(context, item, collection); - adjustBundleBitstreamPolicies(context, item, collection); + throws SQLException, AuthorizeException { + inheritCollectionDefaultPolicies(context, item, collection, true); + } + + @Override + public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { + + adjustItemPolicies(context, item, collection, replaceReadRPWithCollectionRP); + adjustBundleBitstreamPolicies(context, item, collection, replaceReadRPWithCollectionRP); 
log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies", "item_id=" + item.getID())); @@ -1195,46 +1215,120 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect @Override public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) - throws SQLException, AuthorizeException { - List defaultCollectionPolicies = authorizeService - .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); + throws SQLException, AuthorizeException { + adjustBundleBitstreamPolicies(context, item, collection, true); + } + + @Override + public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { + // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files + // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other + // policies or embargos applied + List defaultCollectionBundlePolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + // Bitstreams should inherit from DEFAULT_BITSTREAM_READ + List defaultCollectionBitstreamPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, ResourcePolicy.TYPE_CUSTOM); - if (defaultCollectionPolicies.size() < 1) { + if (defaultCollectionBitstreamPolicies.size() < 1) { throw new SQLException("Collection " + collection.getID() + " (" + collection.getHandle() + ")" + " has no default bitstream READ policies"); } + // TODO: should we also throw an exception if no DEFAULT_ITEM_READ?
+ + boolean removeCurrentReadRPBitstream = + replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0; + boolean removeCurrentReadRPBundle = + replaceReadRPWithCollectionRP && defaultCollectionBundlePolicies.size() > 0; // remove all policies from bundles, add new ones // Remove bundles List bunds = item.getBundles(); for (Bundle mybundle : bunds) { + // If collection has default READ policies, remove the bundle's READ policies. + if (removeCurrentReadRPBundle) { + authorizeService.removePoliciesActionFilter(context, mybundle, Constants.READ); + } // if come from InstallItem: remove all submission/workflow policies authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION); authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_WORKFLOW); addCustomPoliciesNotInPlace(context, mybundle, defaultItemPolicies); - addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionPolicies); + addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies); for (Bitstream bitstream : mybundle.getBitstreams()) { + // If collection has default READ policies, remove the bitstream's READ policies.
+ if (removeCurrentReadRPBitstream) { + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + } + // if come from InstallItem: remove all submission/workflow policies - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION); - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); - addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies); - addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, + defaultCollectionBitstreamPolicies); } } } + @Override + public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) + throws SQLException, AuthorizeException { + adjustBitstreamPolicies(context, item, collection, bitstream, true); + } + + @Override + public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { + List defaultCollectionPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); + + List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, + ResourcePolicy.TYPE_CUSTOM); + if (defaultCollectionPolicies.size() < 1) { + throw new SQLException("Collection " + collection.getID() + + " (" + collection.getHandle() + ")" + + " has no default bitstream READ policies"); + } + + // remove all policies from bitstream, add new ones + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, defaultCollectionPolicies); + } + + private void removeAllPoliciesAndAddDefault(Context context, Bitstream bitstream, + List defaultItemPolicies, + List defaultCollectionPolicies) + throws SQLException, AuthorizeException { + authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, 
ResourcePolicy.TYPE_SUBMISSION); + authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); + addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies); + addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + } + @Override public void adjustItemPolicies(Context context, Item item, Collection collection) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { + adjustItemPolicies(context, item, collection, true); + } + + @Override + public void adjustItemPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { // read collection's default READ policies List defaultCollectionPolicies = authorizeService .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + // If collection has defaultREAD policies, remove the item's READ policies. + if (replaceReadRPWithCollectionRP && defaultCollectionPolicies.size() > 0) { + authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + } + // MUST have default policies if (defaultCollectionPolicies.size() < 1) { throw new SQLException("Collection " + collection.getID() @@ -1444,9 +1538,18 @@ public boolean isInProgressSubmission(Context context, Item item) throws SQLExce */ - @Override + /** + * Add the default policies, which have not been already added to the given DSpace object + * + * @param context The relevant DSpace Context. + * @param dso The DSpace Object to add policies to + * @param defaultCollectionPolicies list of policies + * @throws SQLException An exception that provides information on a database access error or other errors. + * @throws AuthorizeException Exception indicating the current user of the context does not have permission + * to perform a particular action. 
+ */ public void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso, - List defaultCollectionPolicies) throws SQLException, AuthorizeException { + List defaultCollectionPolicies) throws SQLException, AuthorizeException { boolean appendMode = configurationService .getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode", false); for (ResourcePolicy defaultPolicy : defaultCollectionPolicies) { @@ -1741,7 +1844,7 @@ public boolean isItemListedForUser(Context context, Item item) { @Override public Iterator findByIds(Context context, List ids) throws SQLException { return itemDAO.findByIds(context, - ids.stream().map(uuid -> UUID.fromString(uuid)).collect(Collectors.toList())); + ids.stream().map(uuid -> UUID.fromString(uuid)).distinct().collect(Collectors.toList())); } @Override @@ -2138,4 +2241,16 @@ public boolean isLatestVersion(Context context, Item item) throws SQLException { } + @Override + public void addResourcePolicy(Context context, Item item, int actionID, EPerson eperson) + throws SQLException, AuthorizeException { + ResourcePolicy resourcePolicy = + this.authorizeService.createResourcePolicy(context, item, null, eperson, actionID, null); + item.getResourcePolicies().add(resourcePolicy); + } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.itemDAO.exists(context, Item.class, id); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java index fa45ed15e007..7babfce3145b 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java @@ -18,7 +18,8 @@ public enum MetadataSchemaEnum { EPERSON("eperson"), RELATION("relation"), CRIS("cris"), - OAIRECERIF("oairecerif"); + OAIRECERIF("oairecerif"), + PERSON("person"); /** * The String representation of the MetadataSchemaEnum diff --git 
a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index 639cec0e0c30..923b5575fa46 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -61,7 +61,7 @@ public class MetadataValue implements ReloadableEntity { * The value of the field */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "text_value") private String value; diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java index daf9a34378ac..9e0c72258e56 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java @@ -110,7 +110,8 @@ protected List findLatestForDiscoveryMetadataValues( // on the left item as a storage/performance improvement. // As a consequence, when searching for related items (using discovery) // on the pages of the right items you won't be able to find the left item. 
- if (relationshipType.getTilted() != RIGHT && itemEntityType.equals(relationshipType.getLeftType())) { + if (relationshipType.getTilted() != RIGHT + && Objects.equals(relationshipType.getLeftType(), itemEntityType)) { String element = relationshipType.getLeftwardType(); List data = relationshipService .findByLatestItemAndRelationshipType(context, item, relationshipType, true); diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java index 1ed14b4fbe1f..f29e209d7790 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java @@ -55,7 +55,9 @@ public void updateRelationReferences(final Context context, final Relationship r if (singleDirectionRelationship("right", relationship.getRelationshipType())) { times = relation.getLeftPlace() - relation.getRightPlace(); } - rightItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getRightItem().getID().toString())); + if (times > 0) { + rightItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getRightItem().getID().toString())); + } } if (!rightItemsIdsToAdd.isEmpty()) { @@ -79,7 +81,9 @@ public void updateRelationReferences(final Context context, final Relationship r if (singleDirectionRelationship("left", relationship.getRelationshipType())) { times = relation.getRightPlace() - relation.getLeftPlace(); } - leftItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getLeftItem().getID().toString())); + if (times > 0) { + leftItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getLeftItem().getID().toString())); + } } if (!leftItemsIdsToAdd.isEmpty()) { @@ -102,7 +106,9 @@ private void addRightItemsReferences(final Context context, final Relationship r if (singleDirectionRelationship("right", relationship.getRelationshipType())) { times = 
leftItemRelation.getLeftPlace() - leftItemRelation.getRightPlace(); } - rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getRightItem().getID().toString())); + if (times > 0) { + rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getRightItem().getID().toString())); + } } if (!rightItemsToAdd.isEmpty()) { indexingService.updateRelationForItem(leftItem.getID().toString(), @@ -122,7 +128,9 @@ private void addLeftItemsReferences(final Context context, final Relationship re if (singleDirectionRelationship("left", relationship.getRelationshipType())) { times = leftItemRelation.getRightPlace() - leftItemRelation.getLeftPlace(); } - rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getLeftItem().getID().toString())); + if (times > 0) { + rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getLeftItem().getID().toString())); + } } if (!rightItemsToAdd.isEmpty()) { indexingService.updateRelationForItem(rightItem.getID().toString(), diff --git a/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java index 2f53ed0928a2..2b7aa368a5f1 100644 --- a/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java @@ -105,4 +105,8 @@ public void delete(Context context, Site dso) throws SQLException, AuthorizeExce public int getSupportsTypeConstant() { return Constants.SITE; } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.siteDAO.exists(context, Site.class, id); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/AuthorityServiceUtils.java b/dspace-api/src/main/java/org/dspace/content/authority/AuthorityServiceUtils.java index cdb2c324ad96..db4dd6496f01 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/AuthorityServiceUtils.java +++ 
b/dspace-api/src/main/java/org/dspace/content/authority/AuthorityServiceUtils.java @@ -14,6 +14,7 @@ import org.dspace.content.Collection; import org.dspace.core.Constants; import org.dspace.submit.model.UploadConfigurationService; +import org.dspace.submit.service.SubmissionConfigService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -54,4 +55,32 @@ public String getSubmissionOrFormName(SubmissionConfigReader configReader, int d return null; } } + + /** + * + * @param submissionConfigService the Submission Config service + * @param dsoType the type of dspace object (ITEM or BITSTREAM) for all the + * other object null is returned + * @param collection the collection where the object stays + * @return the name of the submission form (if ITEM) or the name of the metadata + * form (BITSTREAM) + */ + public String getSubmissionOrFormName(SubmissionConfigService submissionConfigService, int dsoType, + Collection collection) { + switch (dsoType) { + case Constants.ITEM: + return submissionConfigService.getSubmissionConfigByCollection(collection).getSubmissionName(); + case Constants.BITSTREAM: + SubmissionConfig subCfg = submissionConfigService.getSubmissionConfigByCollection(collection); + for (int i = 0; i < subCfg.getNumberOfSteps(); i++) { + SubmissionStepConfig step = subCfg.getStep(i); + if (SubmissionStepConfig.UPLOAD_STEP_NAME.equalsIgnoreCase(step.getType())) { + return uploadConfigurationService.getMap().get(step.getId()).getMetadata(); + } + } + return null; + default: + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index 6e0800457397..0c545d1592e2 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -27,7 +27,6 @@ import 
org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.SubmissionConfig; -import org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.content.Collection; import org.dspace.content.Item; @@ -37,9 +36,13 @@ import org.dspace.core.Constants; import org.dspace.core.Utils; import org.dspace.core.service.PluginService; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.services.ConfigurationService; +import org.dspace.submit.factory.SubmissionServiceFactory; import org.dspace.submit.model.UploadConfiguration; import org.dspace.submit.model.UploadConfigurationService; +import org.dspace.submit.service.SubmissionConfigService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -93,8 +96,11 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected Map>> authoritiesFormDefinitions = new HashMap>>(); + // Map of vocabulary authorities to and their index info equivalent + protected Map vocabularyIndexMap = new HashMap<>(); + // the item submission reader - private SubmissionConfigReader itemSubmissionConfigReader; + private SubmissionConfigService submissionConfigService; @Autowired(required = true) protected ConfigurationService configurationService; @@ -106,6 +112,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected AuthorityServiceUtils authorityServiceUtils; @Autowired(required = true) protected ItemService itemService; + @Autowired + private DiscoveryConfigurationService searchConfigurationService; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; @@ -152,7 +160,7 @@ public Set getChoiceAuthoritiesNames() { private synchronized void init() { if 
(!initialized) { try { - itemSubmissionConfigReader = new SubmissionConfigReader(); + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); } catch (SubmissionConfigReaderException e) { // the system is in an illegal state as the submission definition is not valid throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), @@ -242,7 +250,7 @@ public String getChoiceAuthorityName(String schema, String element, String quali // check if it is the requested collection Map> controllerFormDefTypes = controllerFormDefinitions.get(fieldKey); Map controllerFormDef = controllerFormDefTypes.get(dsoType); - SubmissionConfig submissionConfig = itemSubmissionConfigReader.getSubmissionConfigByCollection(collection); + SubmissionConfig submissionConfig = submissionConfigService.getSubmissionConfigByCollection(collection); String submissionName = submissionConfig.getSubmissionName(); // check if the requested collection has a submission definition that use an authority for the metadata if (controllerFormDef.containsKey(submissionName)) { @@ -286,14 +294,14 @@ protected String makeFieldKey(String schema, String element, String qualifier) { } @Override - public void clearCache() { + public void clearCache() throws SubmissionConfigReaderException { controller.clear(); authorities.clear(); presentation.clear(); closed.clear(); controllerFormDefinitions.clear(); authoritiesFormDefinitions.clear(); - itemSubmissionConfigReader = null; + submissionConfigService.reload(); initialized = false; } @@ -343,7 +351,7 @@ private void loadChoiceAuthorityConfigurations() { */ private void autoRegisterChoiceAuthorityFromInputReader() { try { - List submissionConfigs = itemSubmissionConfigReader + List submissionConfigs = submissionConfigService .getAllSubmissionConfigs(Integer.MAX_VALUE, 0); DCInputsReader dcInputsReader = new DCInputsReader(); @@ -351,8 +359,19 @@ private void 
autoRegisterChoiceAuthorityFromInputReader() { for (SubmissionConfig subCfg : submissionConfigs) { String submissionName = subCfg.getSubmissionName(); List inputsBySubmissionName = dcInputsReader.getInputsBySubmissionName(submissionName); - autoRegisterChoiceAuthorityFromSubmissionForms(Constants.ITEM, submissionName, - inputsBySubmissionName); + List inputsByGroupOfAllSteps = new ArrayList(); + try { + List inputsByGroup = dcInputsReader.getInputsByGroup(submissionName); + inputsByGroupOfAllSteps.addAll(inputsByGroup); + for (DCInputSet step : inputsBySubmissionName) { + List inputsByGroupOfStep = dcInputsReader.getInputsByGroup(step.getFormName()); + inputsByGroupOfAllSteps.addAll(inputsByGroupOfStep); + } + } catch (DCInputsReaderException e) { + log.warn("Cannot load the groups of the submission: " + submissionName, e); + } + inputsBySubmissionName.addAll(inputsByGroupOfAllSteps); + autoRegisterChoiceAuthorityFromSubmissionForms(Constants.ITEM, submissionName, inputsBySubmissionName); } // loop over all the defined bitstream metadata submission configuration for (UploadConfiguration uploadCfg : uploadConfigurationService.getMap().values()) { @@ -363,8 +382,7 @@ private void autoRegisterChoiceAuthorityFromInputReader() { } } catch (DCInputsReaderException e) { // the system is in an illegal state as the submission definition is not valid - throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), - e); + throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), e); } } @@ -552,19 +570,27 @@ public ChoiceAuthority getAuthorityByFieldKeyCollection(String fieldKey, int dso init(); ChoiceAuthority ma = getAuthorityByFieldAndCollection(fieldKey, collection); if (ma == null && collection != null) { - String submissionName = authorityServiceUtils.getSubmissionOrFormName(itemSubmissionConfigReader, - dsoType, collection); - if (submissionName == null) { - log.warn("No submission name 
was found for object type " + dsoType + " in collection " - + collection.getHandle()); - return null; - } - Map> mapType2SubAuth = controllerFormDefinitions.get(fieldKey); - if (mapType2SubAuth != null) { - Map mapSubAuth = mapType2SubAuth.get(dsoType); - if (mapSubAuth != null) { - ma = mapSubAuth.get(submissionName); + SubmissionConfigService configReaderService; + try { + configReaderService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); + SubmissionConfig submissionName = configReaderService + .getSubmissionConfigByCollection(collection); + if (submissionName == null) { + log.warn("No submission name was found for object type " + dsoType + " in collection " + + collection.getHandle()); + return null; + } + Map> mapType2SubAuth = controllerFormDefinitions.get(fieldKey); + if (mapType2SubAuth != null) { + Map mapSubAuth = mapType2SubAuth.get(dsoType); + if (mapSubAuth != null) { + ma = mapSubAuth.get(submissionName.getSubmissionName()); + } } + } catch (SubmissionConfigReaderException e) { + // the system is in an illegal state as the submission definition is not valid + throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), + e); } } return ma; @@ -576,7 +602,7 @@ private String getCollectionFormName(String fieldKey, Collection collection) { return ""; } - String submissionName = authorityServiceUtils.getSubmissionOrFormName(itemSubmissionConfigReader, + String submissionName = authorityServiceUtils.getSubmissionOrFormName(submissionConfigService, Constants.ITEM, collection); return submissionName; @@ -690,4 +716,50 @@ private boolean isLinkableToAnEntityWithEntityType(ChoiceAuthority choiceAuthori return choiceAuthority instanceof LinkableEntityAuthority && entityType.equals(((LinkableEntityAuthority) choiceAuthority).getLinkedEntityType()); } + + @Override + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { + if (this.vocabularyIndexMap.containsKey(nameVocab)) { 
+ return this.vocabularyIndexMap.get(nameVocab); + } else { + init(); + ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); + if (source != null && source instanceof DSpaceControlledVocabulary) { + Set metadataFields = new HashSet<>(); + Map> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); + for (Map.Entry> formToField : formsToFields.entrySet()) { + metadataFields.addAll(formToField.getValue().stream().map(value -> + StringUtils.replace(value, "_", ".")) + .collect(Collectors.toList())); + } + DiscoverySearchFilterFacet matchingFacet = null; + for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) { + boolean coversAllFieldsFromVocab = true; + for (String fieldFromVocab: metadataFields) { + boolean coversFieldFromVocab = false; + for (String facetMdField: facetConfig.getMetadataFields()) { + if (facetMdField.startsWith(fieldFromVocab)) { + coversFieldFromVocab = true; + break; + } + } + if (!coversFieldFromVocab) { + coversAllFieldsFromVocab = false; + break; + } + } + if (coversAllFieldsFromVocab) { + matchingFacet = facetConfig; + break; + } + } + DSpaceControlledVocabularyIndex vocabularyIndex = + new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, + matchingFacet); + this.vocabularyIndexMap.put(nameVocab, vocabularyIndex); + return vocabularyIndex; + } + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java index 9695f9c32552..ca9f42f13a3d 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java @@ -48,6 +48,8 @@ * fields. 
*/ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority { + public static final String UNKNOWN_KEY = "UNKNOWN KEY "; + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DCInputAuthority.class); /** @@ -92,7 +94,7 @@ public static String[] getPluginNames() { initPluginNames(); } - return (String[]) ArrayUtils.clone(pluginNames); + return ArrayUtils.clone(pluginNames); } private static synchronized void initPluginNames() { @@ -205,17 +207,17 @@ public String getLabel(String key, String locale) { String[] labelsLocale = labels.get(locale); int pos = -1; // search in the values to return the label - for (int i = 0; i < valuesLocale.length; i++) { + for (int i = 0; valuesLocale != null && i < valuesLocale.length; i++) { if (valuesLocale[i].equals(key)) { pos = i; break; } } - if (pos != -1) { + if (pos != -1 && labelsLocale != null) { // return the label in the same position where we found the value return labelsLocale[pos]; } else { - return "UNKNOWN KEY " + key; + return UNKNOWN_KEY + key; } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java new file mode 100644 index 000000000000..bf8194dbd53b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.util.Set; + +import org.dspace.browse.BrowseIndex; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; + +/** + * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a + * {@code BrowseIndexRest} + * cached by {@link 
org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +public class DSpaceControlledVocabularyIndex extends BrowseIndex { + + protected DSpaceControlledVocabulary vocabulary; + protected Set metadataFields; + protected DiscoverySearchFilterFacet facetConfig; + + public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, + DiscoverySearchFilterFacet facetConfig) { + super(controlledVocabulary.vocabularyName); + this.vocabulary = controlledVocabulary; + this.metadataFields = metadataFields; + this.facetConfig = facetConfig; + } + + public DSpaceControlledVocabulary getVocabulary() { + return vocabulary; + } + + public Set getMetadataFields() { + return this.metadataFields; + } + + public DiscoverySearchFilterFacet getFacetConfig() { + return this.facetConfig; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ItemAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/ItemAuthority.java index 6ec39db9764f..173ea83f62ad 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ItemAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ItemAuthority.java @@ -42,6 +42,7 @@ import org.dspace.util.ItemAuthorityUtils; import org.dspace.util.UUIDUtils; import org.dspace.utils.DSpace; +import org.dspace.web.ContextUtil; /** * Sample authority to link a dspace item with another (i.e a publication with @@ -58,7 +59,7 @@ public class ItemAuthority implements ChoiceAuthority, LinkableEntityAuthority { /** the name assigned to the specific instance by the PluginService, @see {@link NameAwarePlugin} **/ private String authorityName; - private DSpace dspace = new DSpace(); + protected DSpace dspace = new DSpace(); protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); @@ -181,9 +182,8 @@ private List 
getChoiceListFromQueryResults(SolrDocumentList results, Str public String getLabel(String key, String locale) { String title = key; if (key != null) { - Context context = null; + Context context = getContext(); try { - context = new Context(); DSpaceObject dso = itemService.find(context, UUIDUtils.fromString(key)); if (dso != null) { title = dso.getName(); @@ -292,4 +292,9 @@ private boolean hasValidExternalSource(String sourceIdentifier) { return false; } + private Context getContext() { + Context context = ContextUtil.obtainCurrentRequestContext(); + return context != null ? context : new Context(); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java index 4dfe09cdec64..02ca7be701d2 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java @@ -43,9 +43,11 @@ public class OrcidAuthority extends ItemAuthority { private static final Logger LOGGER = LoggerFactory.getLogger(OrcidAuthority.class); - public static final String ORCID_EXTRA = "data-person_identifier_orcid"; + private static final String IS_LATIN_REGEX = "\\p{IsLatin}+"; - public static final String INSTITUTION_EXTRA = "institution-affiliation-name"; + public static final String DEFAULT_ORCID_KEY = "person_identifier_orcid"; + + public static final String DEFAULT_INSTITUTION_KEY = "institution-affiliation-name"; private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -118,9 +120,13 @@ private String getTitle(ExpandedResult result) { String givenName = result.getGivenNames(); String familyName = result.getFamilyNames(); - String title = isNotBlank(givenName) ? capitalizeFully(givenName) : ""; - title += isNotBlank(familyName) ? 
" " + capitalizeFully(familyName) : ""; - + String capitalizedFamilyName = capitalizeFully(familyName); + String capitalizedGivenName = capitalizeFully(givenName); + String title = capitalizedFamilyName + ", " + capitalizedGivenName; + if (!givenName.matches(IS_LATIN_REGEX) || !familyName.matches(IS_LATIN_REGEX)) { + title = isNotBlank(familyName) ? capitalizeFully(familyName) : ""; + title += isNotBlank(givenName) ? " " + capitalizeFully(givenName) : ""; + } return title.trim(); } @@ -131,11 +137,24 @@ private String composeAuthorityValue(String orcid) { private Map composeExtras(ExpandedResult result) { Map extras = new HashMap<>(); - extras.put(ORCID_EXTRA, result.getOrcidId()); - + String orcidIdKey = getOrcidIdKey(); + String orcidId = result.getOrcidId(); + if (StringUtils.isNotBlank(orcidId)) { + if (useOrcidIDAsData()) { + extras.put("data-" + orcidIdKey, orcidId); + } + if (useOrcidIDForDisplaying()) { + extras.put(orcidIdKey, orcidId); + } + } + String institutionKey = getInstitutionKey(); String[] institutionNames = result.getInstitutionNames(); - if (ArrayUtils.isNotEmpty(institutionNames)) { - extras.put(INSTITUTION_EXTRA, String.join(", ", institutionNames)); + + if (ArrayUtils.isNotEmpty(institutionNames) && useInstitutionAsData()) { + extras.put("data-" + institutionKey, String.join(", ", institutionNames)); + } + if (ArrayUtils.isNotEmpty(institutionNames) && useInstitutionForDisplaying()) { + extras.put(institutionKey, String.join(", ", institutionNames)); } return extras; @@ -165,4 +184,41 @@ public static void setAccessToken(String accessToken) { OrcidAuthority.accessToken = accessToken; } + public String getOrcidIdKey() { + return configurationService.getProperty("cris.OrcidAuthority." + + getPluginInstanceName() + ".orcid-id.key", + DEFAULT_ORCID_KEY); + } + + public String getInstitutionKey() { + return configurationService.getProperty("cris.OrcidAuthority." 
+ + getPluginInstanceName() + ".institution.key", + DEFAULT_INSTITUTION_KEY); + } + + public boolean useInstitutionAsData() { + return configurationService + .getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + ".institution.as-data", true); + } + + public boolean useInstitutionForDisplaying() { + return configurationService + .getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + ".institution.display", true); + } + + public boolean useOrcidIDAsData() { + return configurationService + .getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + ".orcid-id.as-data", true); + } + + public boolean useOrcidIDForDisplaying() { + return configurationService + .getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + ".orcid-id.display", true); + + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java b/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java index a78430fb574b..05f4e8aea3fa 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java @@ -23,9 +23,12 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.discovery.IndexingService; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.event.Consumer; import org.dspace.event.Event; import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.utils.DSpace; /** @@ -38,26 +41,18 @@ public class ReciprocalItemAuthorityConsumer implements Consumer { private static final Logger log = LogManager.getLogger(ReciprocalItemAuthorityConsumer.class); - private final Map reciprocalMetadata = new ConcurrentHashMap<>(); + private final 
ConfigurationService configurationService = new DSpace().getConfigurationService(); + private final ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private final Map reciprocalMetadataMap = new ConcurrentHashMap<>(); private final transient Set processedHandles = new HashSet<>(); - private final ItemService itemService; - - public ReciprocalItemAuthorityConsumer() { - ConfigurationService confService = new DSpace().getConfigurationService(); - itemService = ContentServiceFactory.getInstance().getItemService(); - for (String conf : confService.getPropertyKeys("ItemAuthority.reciprocalMetadata")) { - reciprocalMetadata.put(conf.substring("ItemAuthority.reciprocalMetadata.".length()), - confService.getProperty(conf)); - reciprocalMetadata.put(confService.getProperty(conf), - conf.substring("ItemAuthority.reciprocalMetadata.".length())); - } - } + private final IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), IndexingService.class); @Override public void initialize() throws Exception { - // nothing + iniReciprocalMetadata(); } @Override @@ -73,11 +68,11 @@ public void consume(Context ctx, Event event) throws Exception { } else { processedHandles.add(item.getID()); } - if (!reciprocalMetadata.isEmpty()) { - for (String k : reciprocalMetadata.keySet()) { + if (!reciprocalMetadataMap.isEmpty()) { + for (String k : reciprocalMetadataMap.keySet()) { String entityType = k.split("\\.", 2)[0]; String metadata = k.split("\\.", 2)[1]; - checkItemRefs(ctx, item, entityType, metadata, reciprocalMetadata.get(k)); + checkItemRefs(ctx, item, entityType, metadata, reciprocalMetadataMap.get(k)); } } } finally { @@ -127,6 +122,34 @@ private void assureReciprocalLink(Context ctx, Item target, String mdString, Str itemService.addMetadata(ctx, target, mdSplit[0], mdSplit[1], mdSplit.length > 2 ? 
mdSplit[2] : null, null, name, sourceUuid, Choices.CF_ACCEPTED); + reindexItem(ctx, target); + } + + private void reindexItem(Context ctx, Item target) throws SQLException { + IndexableItem item = new IndexableItem(target); + item.setIndexedObject(ctx.reloadEntity(item.getIndexedObject())); + String uniqueIndexID = item.getUniqueIndexID(); + if (uniqueIndexID != null) { + try { + indexer.indexContent(ctx, item, true, false, false); + log.debug("Indexed " + + item.getTypeText() + + ", id=" + item.getID() + + ", unique_id=" + uniqueIndexID); + } catch (Exception e) { + log.error("Failed while indexing object: ", e); + } + } + } + + private void iniReciprocalMetadata() { + List properties = configurationService.getPropertyKeys("ItemAuthority.reciprocalMetadata"); + for (String conf : properties) { + reciprocalMetadataMap.put(conf.substring("ItemAuthority.reciprocalMetadata.".length()), + configurationService.getProperty(conf)); + reciprocalMetadataMap.put(configurationService.getProperty(conf), + conf.substring("ItemAuthority.reciprocalMetadata.".length())); + } } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java new file mode 100644 index 000000000000..de2271901819 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java @@ -0,0 +1,152 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.content.authority; + +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.authority.factory.ItemAuthorityServiceFactory; +import 
org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.ror.service.RorImportMetadataSourceServiceImpl; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; + +public class RorOrgUnitAuthority extends ItemAuthority { + + private final RorImportMetadataSourceServiceImpl rorImportMetadataSource = new DSpace().getServiceManager() + .getServicesByType(RorImportMetadataSourceServiceImpl.class).get(0); + + private final ItemAuthorityServiceFactory itemAuthorityServiceFactory = + dspace.getServiceManager().getServiceByName("itemAuthorityServiceFactory", ItemAuthorityServiceFactory.class); + private final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + + private String authorityName; + + @Override + public Choices getMatches(String text, int start, int limit, String locale) { + + super.setPluginInstanceName(authorityName); + Choices solrChoices = super.getMatches(text, start, limit, locale); + + try { + return solrChoices.values.length == 0 ? 
getRORApiMatches(text, start, limit) : solrChoices; + } catch (MetadataSourceException e) { + throw new RuntimeException(e); + } + } + + private Choices getRORApiMatches(String text, int start, int limit) throws MetadataSourceException { + Choice[] rorApiChoices = getChoiceFromRORQueryResults(rorImportMetadataSource.getRecords(text, 0, 0)) + .toArray(new Choice[0]); + + int confidenceValue = itemAuthorityServiceFactory.getInstance(authorityName) + .getConfidenceForChoices(rorApiChoices); + + return new Choices(rorApiChoices, start, rorApiChoices.length, confidenceValue, + rorApiChoices.length > (start + limit), 0); + } + + private List getChoiceFromRORQueryResults(Collection orgUnits) { + return orgUnits + .stream() + .map(orgUnit -> new Choice(composeAuthorityValue(getIdentifier(orgUnit)), getName(orgUnit), + getName(orgUnit), buildExtras(orgUnit))) + .collect(Collectors.toList()); + } + + private String getIdentifier(ImportRecord orgUnit) { + return orgUnit.getValue("organization", "identifier", "ror").stream() + .findFirst() + .map(metadata -> metadata.getValue()) + .orElse(null); + } + + private String getName(ImportRecord orgUnit) { + return orgUnit.getValue("dc", "title", null).stream() + .findFirst() + .map(metadata -> metadata.getValue()) + .orElse(null); + } + + private Map buildExtras(ImportRecord orgUnit) { + + Map extras = new LinkedHashMap(); + + addExtra(extras, getIdentifier(orgUnit), "id"); + + orgUnit.getSingleValue("dc", "type", null) + .ifPresent(type -> addExtra(extras, type, "type")); + + String acronym = orgUnit.getValue("oairecerif", "acronym", null).stream() + .map(MetadatumDTO::getValue) + .collect(Collectors.joining(", ")); + + if (StringUtils.isNotBlank(acronym)) { + addExtra(extras, acronym, "acronym"); + } + + return extras; + } + + private void addExtra(Map extras, String value, String extraType) { + + String key = getKey(extraType); + + if (useAsData(extraType)) { + extras.put("data-" + key, value); + } + if 
(useForDisplaying(extraType)) { + extras.put(key, value); + } + + } + + private boolean useForDisplaying(String extraType) { + return configurationService.getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + "." + extraType + ".display", true); + } + + private boolean useAsData(String extraType) { + return configurationService.getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + "." + extraType + ".as-data", true); + } + + private String getKey(String extraType) { + return configurationService.getProperty("cris.OrcidAuthority." + + getPluginInstanceName() + "." + extraType + ".key", "ror_orgunit_" + extraType); + } + + private String composeAuthorityValue(String rorId) { + String prefix = configurationService.getProperty("ror.authority.prefix", "will be referenced::ROR-ID::"); + return prefix + rorId; + } + + @Override + public String getLinkedEntityType() { + return configurationService.getProperty("cris.ItemAuthority." + authorityName + ".entityType"); + } + + @Override + public void setPluginInstanceName(String name) { + authorityName = name; + } + + @Override + public String getPluginInstanceName() { + return authorityName; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java index 44bd406ce43c..54d8f3325ceb 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java @@ -173,9 +173,4 @@ private boolean isLocalItemChoicesEnabled() { return configurationService.getBooleanProperty("cris." 
+ this.authorityName + ".local-item-choices-enabled"); } - @Override - public Map getExternalSource() { - return Map.of(); - } - } \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java index 497fa08f2faf..123626cd0965 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java @@ -200,8 +200,8 @@ protected void addExternalResults(String text, ArrayList choices, List findDuplicateInternalIdentifier(Context context, Bitstrea @Override public List findBitstreamsWithNoRecentChecksum(Context context) throws SQLException { - Query query = createQuery(context, - "select b from Bitstream b where b not in (select c.bitstream from " + - "MostRecentChecksum c)"); + Query query = createQuery(context, "SELECT b FROM MostRecentChecksum c RIGHT JOIN Bitstream b " + + "ON c.bitstream = b WHERE c IS NULL" ); + return query.getResultList(); } @Override public Iterator findByCommunity(Context context, Community community) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b " + + Query query = createQuery(context, "select b.id from Bitstream b " + "join b.bundles bitBundles " + "join bitBundles.items item " + "join item.collections itemColl " + @@ -85,40 +86,43 @@ public Iterator findByCommunity(Context context, Community community) "WHERE :community IN community"); query.setParameter("community", community); - - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findByCollection(Context context, Collection collection) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b " + + Query query = createQuery(context, "select b.id from Bitstream b " + 
"join b.bundles bitBundles " + "join bitBundles.items item " + "join item.collections c " + "WHERE :collection IN c"); query.setParameter("collection", collection); - - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findByItem(Context context, Item item) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b " + + Query query = createQuery(context, "select b.id from Bitstream b " + "join b.bundles bitBundles " + "join bitBundles.items item " + "WHERE :item IN item"); query.setParameter("item", item); - - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findShowableByItem(Context context, UUID itemId, String bundleName) throws SQLException { Query query = createQuery( context, - "select b from Bitstream b " + + "select b.id from Bitstream b " + "join b.bundles bitBundle " + "join bitBundle.items item " + "WHERE item.id = :itemId " + @@ -150,15 +154,18 @@ public Iterator findShowableByItem(Context context, UUID itemId, Stri query.setParameter("itemId", itemId); query.setParameter("bundleName", bundleName); - - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findByStoreNumber(Context context, Integer storeNumber) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b where b.storeNumber = :storeNumber"); + Query query = createQuery(context, "select b.id from Bitstream b where b.storeNumber = :storeNumber"); query.setParameter("storeNumber", storeNumber); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, 
Bitstream.class, this); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java index 378084ee8c43..3b12c68dcedd 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java @@ -29,6 +29,7 @@ import org.dspace.content.dao.ItemDAO; import org.dspace.core.AbstractHibernateDSODAO; import org.dspace.core.Context; +import org.dspace.core.UUIDIterator; import org.dspace.eperson.EPerson; import org.hibernate.Criteria; import org.hibernate.criterion.Criterion; @@ -56,28 +57,34 @@ protected ItemDAOImpl() { @Override public Iterator findAll(Context context, boolean archived) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id"); + Query query = createQuery(context, "SELECT i.id FROM Item i WHERE inArchive=:in_archive ORDER BY id"); query.setParameter("in_archive", archived); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findAll(Context context, boolean archived, int limit, int offset) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id"); + Query query = createQuery(context, "SELECT i.id FROM Item i WHERE inArchive=:in_archive ORDER BY id"); query.setParameter("in_archive", archived); query.setFirstResult(offset); query.setMaxResults(limit); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findAll(Context context, boolean archived, boolean withdrawn) throws SQLException { Query query = createQuery(context, - "FROM Item WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id"); + 
"SELECT i.id FROM Item i WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id"); query.setParameter("in_archive", archived); query.setParameter("withdrawn", withdrawn); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -86,12 +93,14 @@ public Iterator findAllRegularItems(Context context) throws SQLException { // It does not include workspace, workflow or template items. Query query = createQuery( context, - "SELECT i FROM Item as i " + + "SELECT i.id FROM Item as i " + "LEFT JOIN Version as v ON i = v.item " + "WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) " + "ORDER BY i.id" ); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -99,7 +108,7 @@ public Iterator findAll(Context context, boolean archived, boolean withdrawn, boolean discoverable, Date lastModified) throws SQLException { StringBuilder queryStr = new StringBuilder(); - queryStr.append("SELECT i FROM Item i"); + queryStr.append("SELECT i.id FROM Item i"); queryStr.append(" WHERE (inArchive = :in_archive OR withdrawn = :withdrawn)"); queryStr.append(" AND discoverable = :discoverable"); @@ -115,16 +124,20 @@ public Iterator findAll(Context context, boolean archived, if (lastModified != null) { query.setParameter("last_modified", lastModified, TemporalType.TIMESTAMP); } - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findBySubmitter(Context context, EPerson eperson) throws SQLException { Query query = createQuery(context, - "FROM Item WHERE inArchive=:in_archive and submitter=:submitter ORDER BY id"); + "SELECT i.id FROM Item i WHERE inArchive=:in_archive and submitter=:submitter 
ORDER BY id"); query.setParameter("in_archive", true); query.setParameter("submitter", eperson); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -133,16 +146,18 @@ public Iterator findBySubmitter(Context context, EPerson eperson, boolean if (!retrieveAllItems) { return findBySubmitter(context, eperson); } - Query query = createQuery(context, "FROM Item WHERE submitter=:submitter ORDER BY id"); + Query query = createQuery(context, "SELECT i.id FROM Item i WHERE submitter=:submitter ORDER BY id"); query.setParameter("submitter", eperson); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit) throws SQLException { StringBuilder query = new StringBuilder(); - query.append("SELECT item FROM Item as item "); + query.append("SELECT item.id FROM Item as item "); addMetadataLeftJoin(query, Item.class.getSimpleName().toLowerCase(), Collections.singletonList(metadataField)); query.append(" WHERE item.inArchive = :in_archive"); query.append(" AND item.submitter =:submitter"); @@ -154,13 +169,15 @@ public Iterator findBySubmitter(Context context, EPerson eperson, Metadata hibernateQuery.setParameter("in_archive", true); hibernateQuery.setParameter("submitter", eperson); hibernateQuery.setMaxResults(limit); - return iterate(hibernateQuery); + @SuppressWarnings("unchecked") + List uuids = hibernateQuery.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findByMetadataField(Context context, MetadataField metadataField, String value, boolean inArchive) throws SQLException { - String hqlQueryString = "SELECT item FROM Item as item join item.metadata metadatavalue " + + String hqlQueryString 
= "SELECT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field"; if (value != null) { hqlQueryString += " AND STR(metadatavalue.value) = :text_value"; @@ -172,13 +189,15 @@ public Iterator findByMetadataField(Context context, MetadataField metadat if (value != null) { query.setParameter("text_value", value); } - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findByMetadataField(Context context, MetadataField metadataField, String value) throws SQLException { - String hqlQueryString = "SELECT item FROM Item as item join item.metadata metadatavalue " + + String hqlQueryString = "SELECT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE metadatavalue.metadataField = :metadata_field"; if (value != null) { hqlQueryString += " AND STR(metadatavalue.value) = :text_value"; @@ -189,7 +208,9 @@ public Iterator findByMetadataField(Context context, MetadataField metadat if (value != null) { query.setParameter("text_value", value); } - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } enum OP { @@ -310,20 +331,22 @@ public Iterator findByMetadataQuery(Context context, List findByAuthorityValue(Context context, MetadataField metadataField, String authority, boolean inArchive) throws SQLException { Query query = createQuery(context, - "SELECT item FROM Item as item join item.metadata metadatavalue " + + "SELECT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field AND " + "metadatavalue.authority = :authority ORDER BY item.id"); query.setParameter("in_archive", inArchive); query.setParameter("metadata_field", metadataField); 
query.setParameter("authority", authority); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findArchivedByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException { Query query = createQuery(context, - "select i from Item i join i.collections c " + + "select i.id from Item i join i.collections c " + "WHERE :collection IN c AND i.inArchive=:in_archive ORDER BY i.id"); query.setParameter("collection", collection); query.setParameter("in_archive", true); @@ -333,7 +356,9 @@ public Iterator findArchivedByCollection(Context context, Collection colle if (limit != null) { query.setMaxResults(limit); } - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -368,17 +393,18 @@ public int countArchivedByCollectionExcludingOwning(Context context, Collection @Override public Iterator findAllByCollection(Context context, Collection collection) throws SQLException { Query query = createQuery(context, - "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); + "select i.id from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); query.setParameter("collection", collection); - - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException { Query query = createQuery(context, - "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); + "select i.id from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); query.setParameter("collection", collection); if (offset != null) { @@ 
-387,8 +413,9 @@ public Iterator findAllByCollection(Context context, Collection collection if (limit != null) { query.setMaxResults(limit); } - - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -424,9 +451,12 @@ public int countItems(Context context, List collections, boolean inc public Iterator findByLastModifiedSince(Context context, Date since) throws SQLException { Query query = createQuery(context, - "SELECT i FROM Item i WHERE last_modified > :last_modified ORDER BY id"); + "SELECT i.id FROM Item i WHERE last_modified > :last_modified ORDER BY id"); query.setParameter("last_modified", since, TemporalType.TIMESTAMP); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); + } @Override @@ -462,22 +492,21 @@ public Iterator findByLikeAuthorityValue(Context context, String likeAuthority, Boolean inArchive) throws SQLException { String allItems = Objects.isNull(inArchive) ? 
"" : " item.inArchive=:in_archive AND "; Query query = createQuery(context, - "SELECT DISTINCT item FROM Item as item join item.metadata metadatavalue " + "SELECT DISTINCT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE" + allItems + " metadatavalue.authority like :authority ORDER BY item.id"); if (Objects.nonNull(inArchive)) { query.setParameter("in_archive", inArchive); } query.setParameter("authority", likeAuthority); - return iterate(query); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findByIds(Context context, List ids) throws SQLException { - Query query = createQuery(context, - "SELECT item " + "FROM Item as item WHERE item.id IN (:ids)"); - query.setParameter("ids", ids); - return iterate(query); + return new UUIDIterator(context, ids, Item.class, this); } } diff --git a/dspace-api/src/main/java/org/dspace/content/dto/MetadataValueDTO.java b/dspace-api/src/main/java/org/dspace/content/dto/MetadataValueDTO.java index 630efd5b0284..7bfa8504f902 100644 --- a/dspace-api/src/main/java/org/dspace/content/dto/MetadataValueDTO.java +++ b/dspace-api/src/main/java/org/dspace/content/dto/MetadataValueDTO.java @@ -69,6 +69,14 @@ public MetadataValueDTO(String schema, String element, String qualifier, String this.confidence = confidence; } + public MetadataValueDTO(String metadataField, String value) { + MetadataFieldName fieldName = new MetadataFieldName(metadataField); + this.schema = fieldName.schema; + this.element = fieldName.element; + this.qualifier = fieldName.qualifier; + this.value = value; + } + /** * Constructor for the MetadataValueDTO class * @param schema The schema to be assigned to this MetadataValueDTO object diff --git a/dspace-api/src/main/java/org/dspace/content/edit/CorrectItemMode.java b/dspace-api/src/main/java/org/dspace/content/edit/CorrectItemMode.java index b374861db9a3..2945065db4ea 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/edit/CorrectItemMode.java +++ b/dspace-api/src/main/java/org/dspace/content/edit/CorrectItemMode.java @@ -10,6 +10,7 @@ import java.util.ArrayList; import java.util.List; +import org.dspace.content.logic.Filter; import org.dspace.content.security.AccessItemMode; import org.dspace.content.security.CrisSecurity; @@ -42,6 +43,7 @@ public class CorrectItemMode implements AccessItemMode { * Contains the list of users metadata for CUSTOM security */ private List items = new ArrayList(); + private Filter additionalFilter; @Override public List getSecurities() { @@ -87,4 +89,13 @@ public void setItems(List items) { public List getGroups() { return groups; } + + public void setAdditionalFilter(Filter additionalFilter) { + this.additionalFilter = additionalFilter; + } + + @Override + public Filter getAdditionalFilter() { + return additionalFilter; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/edit/EditItemMode.java b/dspace-api/src/main/java/org/dspace/content/edit/EditItemMode.java index 4d56ddafe731..6f6b33ecaa28 100644 --- a/dspace-api/src/main/java/org/dspace/content/edit/EditItemMode.java +++ b/dspace-api/src/main/java/org/dspace/content/edit/EditItemMode.java @@ -9,6 +9,7 @@ import java.util.List; +import org.dspace.content.logic.Filter; import org.dspace.content.security.AccessItemMode; import org.dspace.content.security.CrisSecurity; @@ -49,6 +50,7 @@ public class EditItemMode implements AccessItemMode { * Contains the list of items metadata for CUSTOM security */ private List items; + private Filter additionalFilter; @Override public List getSecurities() { @@ -100,6 +102,15 @@ public void setItems(List items) { this.items = items; } + public void setAdditionalFilter(Filter additionalFilter) { + this.additionalFilter = additionalFilter; + } + + @Override + public Filter getAdditionalFilter() { + return additionalFilter; + } + @Override public List getGroups() { return groups; diff --git 
a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java index f0e3d9d15649..ee6bdbf1dff6 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java @@ -45,6 +45,10 @@ public interface ItemEnhancer { * * @param context the DSpace Context * @param item the item to enhance + * @param deepMode false, if the implementation can assume that only the target + * item as been updated since the eventual previous computation of enhanced metadata + * @return true, if any changes have been performed on the provided item */ - void enhance(Context context, Item item); + boolean enhance(Context context, Item item, boolean deepMode); + } diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java index c6baab84048d..c526537bf5ac 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java @@ -15,6 +15,8 @@ import org.dspace.core.Context; import org.dspace.event.Consumer; import org.dspace.event.Event; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.utils.DSpace; /** @@ -26,10 +28,13 @@ */ public class ItemEnhancerConsumer implements Consumer { + public static final String ITEMENHANCER_ENABLED = "itemenhancer.enabled"; private Set itemsAlreadyProcessed = new HashSet(); private ItemEnhancerService itemEnhancerService; + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + @Override public void finish(Context ctx) throws Exception { @@ -43,6 +48,10 @@ public void initialize() throws Exception { 
@Override public void consume(Context context, Event event) throws Exception { + if (!isConsumerEnabled()) { + return; + } + Item item = (Item) event.getSubject(context); if (item == null || itemsAlreadyProcessed.contains(item) || !item.isArchived()) { return; @@ -52,13 +61,17 @@ public void consume(Context context, Event event) throws Exception { context.turnOffAuthorisationSystem(); try { - itemEnhancerService.enhance(context, item); + itemEnhancerService.enhance(context, item, false); } finally { context.restoreAuthSystemState(); } } + protected boolean isConsumerEnabled() { + return configurationService.getBooleanProperty(ITEMENHANCER_ENABLED, true); + } + @Override public void end(Context ctx) throws Exception { itemsAlreadyProcessed.clear(); diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java index a5d95582e41d..f17e36ee90a9 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java @@ -15,12 +15,12 @@ import java.util.Objects; import java.util.Optional; import java.util.UUID; -import java.util.function.Consumer; -import java.util.function.Predicate; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; import org.dspace.content.MetadataValue; +import org.dspace.content.authority.Choices; +import org.dspace.content.dto.MetadataValueDTO; import org.dspace.content.enhancer.AbstractItemEnhancer; import org.dspace.content.enhancer.ItemEnhancer; import org.dspace.content.service.ItemService; @@ -57,65 +57,195 @@ public boolean canEnhance(Context context, Item item) { } @Override - public void enhance(Context context, Item item) { - try { - cleanObsoleteVirtualFields(context, item); - 
updateVirtualFieldsPlaces(context, item); - performEnhancement(context, item); - } catch (SQLException e) { - LOGGER.error("An error occurs enhancing item with id {}: {}", item.getID(), e.getMessage(), e); - throw new SQLRuntimeException(e); + public boolean enhance(Context context, Item item, boolean deepMode) { + boolean result = false; + if (!deepMode) { + try { + result = cleanObsoleteVirtualFields(context, item); + result = updateVirtualFieldsPlaces(context, item) || result; + result = performEnhancement(context, item) || result; + } catch (SQLException e) { + LOGGER.error("An error occurs enhancing item with id {}: {}", item.getID(), e.getMessage(), e); + throw new SQLRuntimeException(e); + } + } else { + List currMetadataValues = getCurrentVirtualMetadata(context, item); + List toBeMetadataValues = getToBeVirtualMetadata(context, item); + if (!equivalent(currMetadataValues, toBeMetadataValues)) { + try { + itemService.removeMetadataValues(context, item, currMetadataValues); + addMetadata(context, item, toBeMetadataValues); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + result = true; + } + } + return result; + } + + private void addMetadata(Context context, Item item, List toBeMetadataValues) + throws SQLException { + for (MetadataValueDTO dto : toBeMetadataValues) { + itemService.addMetadata(context, item, dto.getSchema(), dto.getElement(), dto.getQualifier(), null, + dto.getValue(), dto.getAuthority(), dto.getConfidence()); + } + } + + private boolean equivalent(List currMetadataValues, List toBeMetadataValues) { + if (currMetadataValues.size() != toBeMetadataValues.size()) { + return false; + } else { + for (int idx = 0; idx < currMetadataValues.size(); idx++) { + if (!equivalent(currMetadataValues.get(idx), toBeMetadataValues.get(idx))) { + return false; + } + } + } + return true; + } + + private boolean equivalent(MetadataValue metadataValue, MetadataValueDTO metadataValueDTO) { + return 
StringUtils.equals(metadataValue.getMetadataField().getMetadataSchema().getName(), + metadataValueDTO.getSchema()) + && StringUtils.equals(metadataValue.getMetadataField().getElement(), metadataValueDTO.getElement()) + && StringUtils.equals(metadataValue.getMetadataField().getQualifier(), metadataValueDTO.getQualifier()) + && StringUtils.equals(metadataValue.getValue(), metadataValueDTO.getValue()) + && StringUtils.equals(metadataValue.getAuthority(), metadataValueDTO.getAuthority()); + } + + private List getToBeVirtualMetadata(Context context, Item item) { + List tobeVirtualMetadata = new ArrayList<>(); + List virtualSourceFields = getEnhanceableMetadataValue(item); + for (MetadataValue virtualSourceField : virtualSourceFields) { + MetadataValueDTO mv = new MetadataValueDTO(); + mv.setSchema(VIRTUAL_METADATA_SCHEMA); + mv.setElement(VIRTUAL_SOURCE_METADATA_ELEMENT); + mv.setQualifier(getVirtualQualifier()); + String authority = virtualSourceField.getAuthority(); + Item relatedItem = null; + if (StringUtils.isNotBlank(authority)) { + mv.setValue(authority); + relatedItem = findRelatedEntityItem(context, virtualSourceField); + } else { + mv.setValue(PLACEHOLDER_PARENT_METADATA_VALUE); + } + tobeVirtualMetadata.add(mv); + if (relatedItem == null) { + MetadataValueDTO mvRelated = new MetadataValueDTO(); + mvRelated.setSchema(VIRTUAL_METADATA_SCHEMA); + mvRelated.setElement(VIRTUAL_METADATA_ELEMENT); + mvRelated.setQualifier(getVirtualQualifier()); + mvRelated.setValue(PLACEHOLDER_PARENT_METADATA_VALUE); + tobeVirtualMetadata.add(mvRelated); + continue; + } + + List relatedItemMetadataValues = getMetadataValues(relatedItem, relatedItemMetadataField); + if (relatedItemMetadataValues.isEmpty()) { + MetadataValueDTO mvRelated = new MetadataValueDTO(); + mvRelated.setSchema(VIRTUAL_METADATA_SCHEMA); + mvRelated.setElement(VIRTUAL_METADATA_ELEMENT); + mvRelated.setQualifier(getVirtualQualifier()); + mvRelated.setValue(PLACEHOLDER_PARENT_METADATA_VALUE); + 
tobeVirtualMetadata.add(mvRelated); + continue; + } + for (MetadataValue relatedItemMetadataValue : relatedItemMetadataValues) { + MetadataValueDTO mvRelated = new MetadataValueDTO(); + mvRelated.setSchema(VIRTUAL_METADATA_SCHEMA); + mvRelated.setElement(VIRTUAL_METADATA_ELEMENT); + mvRelated.setQualifier(getVirtualQualifier()); + mvRelated.setValue(relatedItemMetadataValue.getValue()); + String authorityRelated = relatedItemMetadataValue.getAuthority(); + if (StringUtils.isNotBlank(authorityRelated)) { + mvRelated.setAuthority(authorityRelated); + mvRelated.setConfidence(Choices.CF_ACCEPTED); + } + tobeVirtualMetadata.add(mvRelated); + } } + return tobeVirtualMetadata; } - private void cleanObsoleteVirtualFields(Context context, Item item) throws SQLException { + private List getCurrentVirtualMetadata(Context context, Item item) { + List currentVirtualMetadata = new ArrayList<>(); + List virtualSourceFields = getVirtualSourceFields(item); + for (MetadataValue virtualSourceField : virtualSourceFields) { + currentVirtualMetadata.add(virtualSourceField); + getRelatedVirtualField(item, virtualSourceField).ifPresent(currentVirtualMetadata::add); + } + return currentVirtualMetadata; + } + private boolean cleanObsoleteVirtualFields(Context context, Item item) throws SQLException { + boolean result = false; List metadataValuesToDelete = getObsoleteVirtualFields(item); if (!metadataValuesToDelete.isEmpty()) { itemService.removeMetadataValues(context, item, metadataValuesToDelete); + result = true; } - + return result; } - private void updateVirtualFieldsPlaces(Context context, Item item) { - List virtualSourceFields = getMetadataValues(item, getVirtualSourceMetadataField()); + private boolean updateVirtualFieldsPlaces(Context context, Item item) { + boolean result = false; + List virtualSourceFields = getVirtualSourceFields(item); + List enhanceableMetadataValue = getEnhanceableMetadataValue(item); for (MetadataValue virtualSourceField : virtualSourceFields) { - 
metadataWithPlaceToUpdate(item, virtualSourceField) - .ifPresent(updatePlaces(item, virtualSourceField)); + Optional metadataWithPlaceToUpdate = metadataWithPlaceToUpdate(item, + enhanceableMetadataValue, virtualSourceField); + if (metadataWithPlaceToUpdate.isPresent()) { + updatePlaces(item, metadataWithPlaceToUpdate.get(), virtualSourceField); + result = true; + } } + return result; } - private Optional metadataWithPlaceToUpdate(Item item, MetadataValue virtualSourceField) { - return findEnhanceableValue(virtualSourceField, item) - .filter(hasToUpdatePlace(virtualSourceField)) - .stream().findFirst(); + private Optional metadataWithPlaceToUpdate(Item item, List enhanceableMetadataValue, + MetadataValue virtualSourceField) { + return findMetadataValueToUpdatePlace(enhanceableMetadataValue, virtualSourceField, + item); } - private Predicate hasToUpdatePlace(MetadataValue virtualSourceField) { - return metadataValue -> metadataValue.getPlace() != virtualSourceField.getPlace(); + private boolean hasToUpdatePlace(MetadataValue metadataValue, MetadataValue virtualSourceField) { + return metadataValue.getPlace() != virtualSourceField.getPlace(); } - private Consumer updatePlaces(Item item, MetadataValue virtualSourceField) { - return mv -> { - virtualSourceField.setPlace(mv.getPlace()); - getRelatedVirtualField(item, mv) - .ifPresent(relatedMv -> relatedMv.setPlace(mv.getPlace())); - }; + private void updatePlaces(Item item, MetadataValue mv, MetadataValue virtualSourceField) { + virtualSourceField.setPlace(mv.getPlace()); + getRelatedVirtualField(item, mv) + .ifPresent(relatedMv -> relatedMv.setPlace(mv.getPlace())); } - private Optional findEnhanceableValue(MetadataValue virtualSourceField, Item item) { - return getEnhanceableMetadataValue(item).stream() - .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, virtualSourceField.getValue())) - .findFirst(); + private Optional findMetadataValueToUpdatePlace(List enhanceableMetadataValue, + MetadataValue 
virtualSourceField, Item item) { + Optional exactMatch = enhanceableMetadataValue.stream() + .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, + virtualSourceField.getValue()) && !hasToUpdatePlace(metadataValue, virtualSourceField)) + .findFirst(); + if (exactMatch.isPresent()) { + enhanceableMetadataValue.remove(exactMatch.get()); + return Optional.empty(); + } else { + Optional authorityOnlyMatch = enhanceableMetadataValue.stream() + .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, + virtualSourceField.getValue()) && hasToUpdatePlace(metadataValue, virtualSourceField)) + .findFirst(); + enhanceableMetadataValue.remove(authorityOnlyMatch.get()); + return authorityOnlyMatch; + } } private List getObsoleteVirtualFields(Item item) { List obsoleteVirtualFields = new ArrayList<>(); - List virtualSourceFields = getMetadataValues(item, getVirtualSourceMetadataField()); + List virtualSourceFields = getVirtualSourceFields(item); + List enhanceableMetadata = getEnhanceableMetadataValue(item); for (MetadataValue virtualSourceField : virtualSourceFields) { - if (isRelatedSourceNoMorePresent(item, virtualSourceField)) { + if (isRelatedSourceNoMorePresent(item, enhanceableMetadata, virtualSourceField)) { obsoleteVirtualFields.add(virtualSourceField); getRelatedVirtualField(item, virtualSourceField).ifPresent(obsoleteVirtualFields::add); } @@ -125,22 +255,40 @@ private List getObsoleteVirtualFields(Item item) { } - private boolean isRelatedSourceNoMorePresent(Item item, MetadataValue virtualSourceField) { - return getEnhanceableMetadataValue(item).stream() - .noneMatch(metadataValue -> hasAuthorityEqualsTo(metadataValue, virtualSourceField.getValue())); + /** + * This method will look in the enhanceableMetadata if the source metadata is still present. 
If so, it will remove + * form the list as it would not be used to validate other potential duplicate source metadata + * + * @param item + * @param enhanceableMetadata + * @param virtualSourceField + * @return true if the metadata containing a source of enhancement is still present in the list of the metadata to + * use to enhance the item + */ + private boolean isRelatedSourceNoMorePresent(Item item, List enhanceableMetadata, + MetadataValue virtualSourceField) { + Optional mv = enhanceableMetadata.stream() + .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, virtualSourceField.getValue())) + .findFirst(); + if (mv.isPresent()) { + enhanceableMetadata.remove(mv.get()); + return false; + } + return true; } private Optional getRelatedVirtualField(Item item, MetadataValue virtualSourceField) { - return getMetadataValues(item, getVirtualMetadataField()).stream() + return getVirtualFields(item).stream() .filter(metadataValue -> metadataValue.getPlace() == virtualSourceField.getPlace()) .findFirst(); } - private void performEnhancement(Context context, Item item) throws SQLException { - + private boolean performEnhancement(Context context, Item item) throws SQLException { + boolean result = false; if (noEnhanceableMetadata(context, item)) { - return; + return false; } + for (MetadataValue metadataValue : getEnhanceableMetadataValue(item)) { if (wasValueAlreadyUsedForEnhancement(item, metadataValue)) { @@ -150,7 +298,7 @@ private void performEnhancement(Context context, Item item) throws SQLException Item relatedItem = findRelatedEntityItem(context, metadataValue); if (relatedItem == null) { addVirtualField(context, item, PLACEHOLDER_PARENT_METADATA_VALUE); - addVirtualSourceField(context, item, PLACEHOLDER_PARENT_METADATA_VALUE); + addVirtualSourceField(context, item, metadataValue); continue; } @@ -164,9 +312,9 @@ private void performEnhancement(Context context, Item item) throws SQLException addVirtualField(context, item, 
relatedItemMetadataValue.getValue()); addVirtualSourceField(context, item, metadataValue); } - + result = true; } - + return result; } private boolean noEnhanceableMetadata(Context context, Item item) { @@ -177,13 +325,8 @@ private boolean noEnhanceableMetadata(Context context, Item item) { } private boolean validAuthority(Context context, MetadataValue metadataValue) { - - // FIXME: we could find a more efficient way, here we are doing twice the same action - // to understand if the enhanced item has at least an item whose references should be put in virtual fields. Item relatedItem = findRelatedEntityItem(context, metadataValue); - return Objects.nonNull(relatedItem) && - CollectionUtils.isNotEmpty( - getMetadataValues(relatedItem, relatedItemMetadataField)); + return Objects.nonNull(relatedItem); } private List getEnhanceableMetadataValue(Item item) { @@ -191,13 +334,25 @@ private List getEnhanceableMetadataValue(Item item) { } private boolean wasValueAlreadyUsedForEnhancement(Item item, MetadataValue metadataValue) { - return getMetadataValues(item, getVirtualSourceMetadataField()).stream() + + if (isPlaceholderAtPlace(getVirtualFields(item), metadataValue.getPlace())) { + return true; + } + + return getVirtualSourceFields(item).stream() .anyMatch(virtualSourceField -> virtualSourceField.getPlace() == metadataValue.getPlace() && hasAuthorityEqualsTo(metadataValue, virtualSourceField.getValue())); + + } + + private boolean isPlaceholderAtPlace(List metadataValues, int place) { + return place < metadataValues.size() ? 
isPlaceholder(metadataValues.get(place)) : false; } private boolean hasAuthorityEqualsTo(MetadataValue metadataValue, String authority) { - return Objects.equals(metadataValue.getAuthority(), authority); + return Objects.equals(metadataValue.getAuthority(), authority) + || (StringUtils.isBlank(metadataValue.getAuthority()) + && Objects.equals(PLACEHOLDER_PARENT_METADATA_VALUE, authority)); } private Item findRelatedEntityItem(Context context, MetadataValue metadataValue) { @@ -209,17 +364,33 @@ private Item findRelatedEntityItem(Context context, MetadataValue metadataValue) } } + private boolean isPlaceholder(MetadataValue metadataValue) { + return PLACEHOLDER_PARENT_METADATA_VALUE.equals(metadataValue.getValue()); + } + private List getMetadataValues(Item item, String metadataField) { return itemService.getMetadataByMetadataString(item, metadataField); } + private List getVirtualSourceFields(Item item) { + return getMetadataValues(item, getVirtualSourceMetadataField()); + } + + private List getVirtualFields(Item item) { + return getMetadataValues(item, getVirtualMetadataField()); + } + private void addVirtualField(Context context, Item item, String value) throws SQLException { itemService.addMetadata(context, item, VIRTUAL_METADATA_SCHEMA, VIRTUAL_METADATA_ELEMENT, getVirtualQualifier(), null, value); } private void addVirtualSourceField(Context context, Item item, MetadataValue sourceValue) throws SQLException { - addVirtualSourceField(context, item, sourceValue.getAuthority()); + if (StringUtils.isNotBlank(sourceValue.getAuthority())) { + addVirtualSourceField(context, item, sourceValue.getAuthority()); + } else { + addVirtualSourceField(context, item, PLACEHOLDER_PARENT_METADATA_VALUE); + } } private void addVirtualSourceField(Context context, Item item, String sourceValueAuthority) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java 
b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java index 2c4d1f203468..3100920dc17c 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java @@ -82,14 +82,8 @@ private Iterator findItemsToEnhance() { private void enhanceItem(Item item) { - if (force) { - itemEnhancerService.forceEnhancement(context, item); - } else { - itemEnhancerService.enhance(context, item); - } - + itemEnhancerService.enhance(context, item, force); uncacheItem(item); - } private void uncacheItem(Item item) { diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java index 3822695969f2..17377f67a3dd 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java @@ -41,7 +41,9 @@ public Options getOptions() { if (options == null) { Options options = new Options(); - options.addOption("f", "force", false, "force the recalculation of all the virtual fields"); + options.addOption("f", "force", false, + "force the usage of the deep mode" + + " (always compute the enhanced metadata to verify if the item need an update)"); options.getOption("f").setType(boolean.class); options.getOption("f").setRequired(false); diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java b/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java index 5b3b419bfa8f..08170448e681 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java @@ -24,15 +24,9 @@ public 
interface ItemEnhancerService { * * @param context the DSpace Context * @param item the item to enhance + * @param deepMode false, if the implementation can assume that only the target + * item as been updated since the eventual previous computation of enhanced metadata */ - void enhance(Context context, Item item); + void enhance(Context context, Item item, boolean deepMode); - /** - * Remove all the already calculated virtual metadata fields from the given item - * and perform a new enhancement. - * - * @param context the DSpace Context - * @param item the item to enhance - */ - void forceEnhancement(Context context, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java index 90f9181a5e37..e751a431ac37 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java @@ -7,23 +7,15 @@ */ package org.dspace.content.enhancer.service.impl; -import static org.dspace.content.Item.ANY; -import static org.dspace.content.enhancer.ItemEnhancer.VIRTUAL_METADATA_ELEMENT; -import static org.dspace.content.enhancer.ItemEnhancer.VIRTUAL_METADATA_SCHEMA; -import static org.dspace.content.enhancer.ItemEnhancer.VIRTUAL_SOURCE_METADATA_ELEMENT; - import java.sql.SQLException; import java.util.List; -import org.apache.commons.collections4.ListUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; -import org.dspace.content.MetadataValue; import org.dspace.content.enhancer.ItemEnhancer; import org.dspace.content.enhancer.service.ItemEnhancerService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; -import org.dspace.core.exception.SQLRuntimeException; import org.springframework.beans.factory.annotation.Autowired; /** @@ 
-41,38 +33,18 @@ public class ItemEnhancerServiceImpl implements ItemEnhancerService { private ItemService itemService; @Override - public void enhance(Context context, Item item) { - - itemEnhancers.stream() - .filter(itemEnhancer -> itemEnhancer.canEnhance(context, item)) - .forEach(itemEnhancer -> itemEnhancer.enhance(context, item)); - - updateItem(context, item); - - } - - @Override - public void forceEnhancement(Context context, Item item) { - cleanUpVirtualFields(context, item); - enhance(context, item); - } + public void enhance(Context context, Item item, boolean deepMode) { + boolean isUpdateNeeded = false; - private void cleanUpVirtualFields(Context context, Item item) { - - List virtualFields = getVirtualFields(item); - List virtualSourceFields = getVirtualSourceFields(item); - List metadataValuesToRemove = ListUtils.union(virtualFields, virtualSourceFields); - - if (metadataValuesToRemove.isEmpty()) { - return; + for (ItemEnhancer itemEnhancer : itemEnhancers) { + if (itemEnhancer.canEnhance(context, item)) { + isUpdateNeeded = itemEnhancer.enhance(context, item, deepMode) || isUpdateNeeded; + } } - try { - itemService.removeMetadataValues(context, item, ListUtils.union(virtualFields, virtualSourceFields)); - } catch (SQLException e) { - throw new SQLRuntimeException(e); + if (isUpdateNeeded) { + updateItem(context, item); } - } private void updateItem(Context context, Item item) { @@ -83,14 +55,6 @@ private void updateItem(Context context, Item item) { } } - private List getVirtualFields(Item item) { - return itemService.getMetadata(item, VIRTUAL_METADATA_SCHEMA, VIRTUAL_METADATA_ELEMENT, ANY, ANY); - } - - private List getVirtualSourceFields(Item item) { - return itemService.getMetadata(item, VIRTUAL_METADATA_SCHEMA, VIRTUAL_SOURCE_METADATA_ELEMENT, ANY, ANY); - } - public List getItemEnhancers() { return itemEnhancers; } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java 
b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java index 3a8b5a1524d1..dba686198e8a 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java @@ -11,6 +11,7 @@ import org.dspace.content.crosswalk.CrosswalkMode; import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.core.Context; /** * Implementation of {@link StreamDisseminationCrosswalk} related to item @@ -40,4 +41,8 @@ public default Optional getEntityType() { public default CrosswalkMode getCrosswalkMode() { return CrosswalkMode.SINGLE; } + + public default boolean isAuthorized(Context context) { + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java new file mode 100644 index 000000000000..292a1e14f946 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import java.io.IOException; +import java.io.OutputStream; +import java.sql.SQLException; +import javax.annotation.PostConstruct; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.METSDisseminationCrosswalk; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.core.Context; +import org.jdom2.Element; +import org.jdom2.output.Format; 
+import org.jdom2.output.XMLOutputter; + +/** + * Implementation of {@link StreamDisseminationCrosswalk} that produces a METS + * manifest for the DSpace item as a metadata description, using + * {@link METSDisseminationCrosswalk}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class METSStreamDisseminationCrosswalk implements StreamDisseminationCrosswalk { + + private METSDisseminationCrosswalk metsDisseminationCrosswalk; + + @PostConstruct + public void setup() { + metsDisseminationCrosswalk = new METSDisseminationCrosswalk("AIP"); + } + + @Override + public boolean canDisseminate(Context context, DSpaceObject dso) { + return metsDisseminationCrosswalk.canDisseminate(dso); + } + + @Override + public void disseminate(Context context, DSpaceObject dso, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + + Element element = metsDisseminationCrosswalk.disseminateElement(context, dso); + + XMLOutputter xmlOutputter = new XMLOutputter(Format.getPrettyFormat()); + xmlOutputter.output(element, out); + + } + + @Override + public String getMIMEType() { + return "application/xml"; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java index d54fef41ee68..519d9531cb71 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java @@ -58,6 +58,9 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfigurationUtilsService; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; import org.dspace.services.ConfigurationService; import org.dspace.util.UUIDUtils; import 
org.springframework.beans.factory.annotation.Autowired; @@ -94,6 +97,9 @@ public class ReferCrosswalk implements ItemExportCrosswalk { @Autowired private MetadataSecurityService metadataSecurityService; + @Autowired + private GroupService groupService; + private Converter converter; private Consumer> linesPostProcessor; @@ -116,6 +122,8 @@ public class ReferCrosswalk implements ItemExportCrosswalk { private CrosswalkMode crosswalkMode; + private List allowedGroups; + @PostConstruct private void postConstruct() throws IOException { String parent = configurationService.getProperty("dspace.dir") + File.separator + "config" + File.separator; @@ -128,6 +136,21 @@ private void postConstruct() throws IOException { } } + @Override + public boolean isAuthorized(Context context) { + if (CollectionUtils.isEmpty(allowedGroups)) { + return true; + } + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return allowedGroups.contains(Group.ANONYMOUS); + } + + return allowedGroups.stream() + .anyMatch(groupName -> isMemberOfGroupNamed(context, ePerson, groupName)); + } + @Override public void disseminate(Context context, DSpaceObject dso, OutputStream out) throws CrosswalkException, IOException, SQLException, AuthorizeException { @@ -136,6 +159,10 @@ public void disseminate(Context context, DSpaceObject dso, OutputStream out) throw new CrosswalkObjectNotSupported("Can only crosswalk an Item with the configured type: " + entityType); } + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + List lines = getItemLines(context, dso, true); if (linesPostProcessor != null) { @@ -154,6 +181,10 @@ public void disseminate(Context context, Iterator dsoIte throw new UnsupportedOperationException("No template defined for multiple items"); } + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + List lines = new 
ArrayList(); for (TemplateLine line : multipleItemsTemplateLines) { @@ -466,6 +497,15 @@ private boolean hasExpectedEntityType(Item item) { return Objects.equals(itemEntityType, entityType); } + private boolean isMemberOfGroupNamed(Context context, EPerson ePerson, String groupName) { + try { + Group group = groupService.findByName(context, groupName); + return groupService.isMember(context, ePerson, group); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + public void setConverter(Converter converter) { this.converter = converter; } @@ -525,4 +565,12 @@ public void setPubliclyReadable(boolean isPubliclyReadable) { this.publiclyReadable = isPubliclyReadable; } + public List getAllowedGroups() { + return allowedGroups; + } + + public void setAllowedGroups(List allowedGroups) { + this.allowedGroups = allowedGroups; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/XlsCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/XlsCrosswalk.java index 026b6f375dfa..cbbfee4fb49b 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/XlsCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/XlsCrosswalk.java @@ -11,6 +11,7 @@ import java.io.OutputStream; import java.util.List; +import org.apache.commons.lang.StringUtils; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.Row; @@ -45,7 +46,7 @@ protected void writeRows(List> rows, OutputStream out) { int cellCount = 0; for (String field : row) { Cell cell = sheetRow.createCell(cellCount++); - cell.setCellValue(field); + cell.setCellValue(StringUtils.length(field) > 32726 ? 
field.substring(0, 32725) + "…" : field ); } } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java new file mode 100644 index 000000000000..2096fa037273 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java @@ -0,0 +1,325 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.CrosswalkMode; +import org.dspace.content.crosswalk.CrosswalkObjectNotSupported; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.core.exception.SQLRuntimeException; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.storage.bitstore.service.BitstreamStorageService; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.Assert; + +/** + * Implementation of {@link ItemExportCrosswalk} that export all the given items + * creating a zip. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ZipItemExportCrosswalk implements ItemExportCrosswalk { + + private static final Logger LOGGER = LoggerFactory.getLogger(ZipItemExportCrosswalk.class); + + @Autowired + private ItemService itemService; + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private BitstreamStorageService bitstreamStorageService; + + @Autowired + private GroupService groupService; + + private String zipName = "items.zip"; + + private String entityType; + + private String bitstreamBundle = "ORIGINAL"; + + private String metadataFileName; + + private StreamDisseminationCrosswalk crosswalk; + + private CrosswalkMode crosswalkMode = CrosswalkMode.MULTIPLE; + + private List allowedGroups; + + @Override + public boolean isAuthorized(Context context) { + if (CollectionUtils.isEmpty(allowedGroups)) { + return true; + } + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return allowedGroups.contains(Group.ANONYMOUS); + } + + return allowedGroups.stream() + .anyMatch(groupName -> isMemberOfGroupNamed(context, ePerson, groupName)); + } + + @Override + public boolean canDisseminate(Context context, DSpaceObject dso) { + return dso.getType() == Constants.ITEM && hasExpectedEntityType((Item) dso); + } + + @Override + public void disseminate(Context context, DSpaceObject dso, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + this.disseminate(context, Arrays.asList(dso).iterator(), out); + } + + @Override + public void disseminate(Context context, Iterator dsoIterator, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + + 
Assert.notNull(metadataFileName, "The name of the metadata file is required to perform a bulk item export"); + Assert.notNull(crosswalk, "An instance of DisseminationCrosswalk is required to perform a bulk item export"); + Assert.notNull(zipName, "The name of the zip to be generated is required to perform a bulk item export"); + + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + + createZip(context, dsoIterator, out); + + } + + private void createZip(Context context, Iterator dsoIterator, OutputStream out) + throws CrosswalkObjectNotSupported, IOException { + + try (ZipOutputStream zos = new ZipOutputStream(out)) { + + while (dsoIterator.hasNext()) { + + DSpaceObject dso = dsoIterator.next(); + if (!canDisseminate(context, dso)) { + throw new CrosswalkObjectNotSupported( + "Can only crosswalk an Item with the configured type: " + entityType); + } + + try { + createFolder(context, (Item) dso, zos); + } catch (Exception ex) { + LOGGER.error("An error occurs creating folder for item " + dso.getID(), ex); + } + + } + + } + + } + + private void createFolder(Context context, Item item, ZipOutputStream zos) throws IOException { + + createMetadataEntry(context, item, zos); + + List bitstreams = getBitstreamToExport(item); + for (Bitstream bitstream : bitstreams) { + try { + addBitstreamEntry(context, item, bitstream, zos); + } catch (Exception ex) { + LOGGER.error("An error occurs adding bitstream " + bitstream.getID() + + " to the folder of item " + item.getID(), ex); + } + } + + } + + private void createMetadataEntry(Context context, Item item, ZipOutputStream zos) throws IOException { + ZipEntry metadataEntry = new ZipEntry(getFolderName(item) + "/" + getMetadataFileName()); + zos.putNextEntry(metadataEntry); + zos.write(getMetadataFileNameContent(context, item)); + zos.closeEntry(); + } + + private byte[] getMetadataFileNameContent(Context context, Item item) { + try { + 
ByteArrayOutputStream out = new ByteArrayOutputStream(); + crosswalk.disseminate(context, item, out); + return out.toByteArray(); + } catch (CrosswalkException | IOException | SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + } + + private List getBitstreamToExport(Item item) { + try { + return bitstreamService.getBitstreamByBundleName(item, bitstreamBundle); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + + private void addBitstreamEntry(Context context, Item item, Bitstream bitstream, ZipOutputStream zos) + throws IOException { + + InputStream bitstreamContent = retrieveContent(context, bitstream); + + ZipEntry bitstreamEntry = new ZipEntry(getFolderName(item) + "/" + getBitstreamFileName(context, bitstream)); + zos.putNextEntry(bitstreamEntry); + + try { + writeBitstreamContent(bitstreamContent, zos); + } finally { + zos.closeEntry(); + } + + } + + private void writeBitstreamContent(InputStream content, ZipOutputStream zos) throws IOException { + byte[] bytes = new byte[1024]; + int length; + while ((length = content.read(bytes)) >= 0) { + zos.write(bytes, 0, length); + } + } + + private String getBitstreamFileName(Context context, Bitstream bitstream) { + String name = "bitstream_" + bitstream.getID().toString(); + return getBitstreamExtension(context, bitstream) + .map(extension -> name + "." 
+ extension) + .orElse(name); + } + + private Optional getBitstreamExtension(Context context, Bitstream bitstream) { + try { + return bitstream.getFormat(context).getExtensions().stream().findFirst(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private InputStream retrieveContent(Context context, Bitstream bitstream) { + try { + return bitstreamStorageService.retrieve(context, bitstream); + } catch (SQLException | IOException e) { + throw new RuntimeException(e); + } + } + + private String getMetadataFileName() { + return metadataFileName; + } + + private String getFolderName(Item item) { + return item.getID().toString(); + } + + private boolean isMemberOfGroupNamed(Context context, EPerson ePerson, String groupName) { + try { + Group group = groupService.findByName(context, groupName); + return groupService.isMember(context, ePerson, group); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public String getMIMEType() { + return "application/octet-stream"; + } + + public void setCrosswalkMode(CrosswalkMode crosswalkMode) { + this.crosswalkMode = crosswalkMode; + } + + @Override + public CrosswalkMode getCrosswalkMode() { + return Optional.ofNullable(this.crosswalkMode).orElse(CrosswalkMode.MULTIPLE); + } + + private boolean hasExpectedEntityType(Item item) { + if (StringUtils.isBlank(entityType)) { + return true; + } + return entityType.equals(itemService.getEntityType(item)); + } + + @Override + public String getFileName() { + return getZipName(); + } + + public String getZipName() { + return zipName; + } + + public void setZipName(String zipName) { + this.zipName = zipName; + } + + public Optional getEntityType() { + return Optional.ofNullable(entityType); + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public StreamDisseminationCrosswalk getCrosswalk() { + return crosswalk; + } + + public void setCrosswalk(StreamDisseminationCrosswalk crosswalk) { + 
this.crosswalk = crosswalk; + } + + public String getBitstreamBundle() { + return bitstreamBundle; + } + + public void setBitstreamBundle(String bitstreamBundle) { + this.bitstreamBundle = bitstreamBundle; + } + + public void setMetadataFileName(String metadataFileName) { + this.metadataFileName = metadataFileName; + } + + public List getAllowedGroups() { + return allowedGroups; + } + + public void setAllowedGroups(List allowedGroups) { + this.allowedGroups = allowedGroups; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java index 717d62b3df92..168e720e581b 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java @@ -8,12 +8,14 @@ package org.dspace.content.integration.crosswalks.script; import java.sql.SQLException; +import java.util.List; import java.util.Optional; import org.apache.commons.cli.Options; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.services.ConfigurationService; import org.dspace.utils.DSpace; @@ -29,6 +31,11 @@ public class BulkItemExportScriptConfiguration extends private Class dspaceRunnableClass; + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + return this.isAllowedToExecute(context); + } + @Override public boolean isAllowedToExecute(Context context) { StringBuilder property = new StringBuilder("bulk-export.limit."); diff --git 
a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExport.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExport.java index d52287ad3631..43b5d0b0971e 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExport.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExport.java @@ -10,6 +10,7 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.sql.SQLException; +import java.util.Objects; import java.util.UUID; import org.apache.commons.cli.ParseException; @@ -67,6 +68,7 @@ public void internalRun() throws Exception { context = new Context(Context.Mode.READ_ONLY); assignCurrentUserInContext(); + assignHandlerLocaleInContext(); assignSpecialGroupsInContext(); if (exportFormat == null) { @@ -140,6 +142,16 @@ private void assignSpecialGroupsInContext() throws SQLException { } } + private void assignHandlerLocaleInContext() { + if (Objects.nonNull(this.handler) && + Objects.nonNull(this.context) && + Objects.nonNull(this.handler.getLocale()) && + !this.handler.getLocale().equals(this.context.getCurrentLocale()) + ) { + this.context.setCurrentLocale(this.handler.getLocale()); + } + } + private StreamDisseminationCrosswalk getCrosswalkByType(String type) { return new DSpace().getSingletonService(StreamDisseminationCrosswalkMapper.class).getByType(type); } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java index 22f65cb3b907..ff22414e0596 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java @@ -7,8 +7,11 @@ */ package 
org.dspace.content.integration.crosswalks.script; +import java.util.List; + import org.apache.commons.cli.Options; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -22,6 +25,11 @@ public class ItemExportScriptConfiguration extends ScriptC private Class dspaceRunnableClass; + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + return this.isAllowedToExecute(context); + } + @Override public boolean isAllowedToExecute(Context context) { return true; diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java index 4d33ba35c5e8..5745ec3e8ce8 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java @@ -45,6 +45,7 @@ public ItemExportFormat get(Context context, String id) { public List getAll(Context context) { return this.streamDissiminatorCrosswalkMapper.getAllItemExportCrosswalks().entrySet().stream() + .filter(entry -> entry.getValue().isAuthorized(context)) .map(entry -> buildItemExportFormat(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); @@ -58,6 +59,7 @@ public List byEntityTypeAndMolteplicity(Context context, Strin .entrySet().stream() .filter(entry -> hasSameMolteplicity(entry.getValue(), molteplicity)) .filter(entry -> hasSameEntityType(entry.getValue(), entityType)) + .filter(entry -> entry.getValue().isAuthorized(context)) .map(entry -> buildItemExportFormat(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/ItemDOIService.java 
b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/ItemDOIService.java new file mode 100644 index 000000000000..03229f634a6b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/ItemDOIService.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks.virtualfields; + +import java.util.Comparator; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + + +public class ItemDOIService { + static final String CFG_PREFIX = "identifier.doi.prefix"; + + static final String DOI_METADATA = "dc.identifier.doi"; + + @Autowired + protected ItemService itemService; + @Autowired + private ConfigurationService configurationService; + + public String[] getAlternativeDOIFromItem(Item item) { + List metadataValueList = itemService.getMetadataByMetadataString(item, DOI_METADATA); + return getAlternativeDOI(metadataValueList, getPrimaryDOI(metadataValueList)); + } + private String[] getAlternativeDOI(List metadataValueList, String primaryValue) { + return metadataValueList.stream().map(MetadataValue::getValue) + .filter(value -> !value.equals(primaryValue)).toArray(String[]::new); + } + + public String getPrimaryDOIFromItem(Item item) { + return getPrimaryDOI(itemService.getMetadataByMetadataString(item, DOI_METADATA)); + } + + private String getPrimaryDOI(List metadataValueList) { + return metadataValueList.stream().filter(metadata -> metadata.getValue().contains(getPrefix())) + .min(Comparator.comparingInt(MetadataValue::getPlace)).map(MetadataValue::getValue) + 
.orElse(!metadataValueList.isEmpty() ? metadataValueList.get(0).getValue() : null); + } + + protected String getPrefix() { + String prefix; + prefix = this.configurationService.getProperty(CFG_PREFIX); + if (null == prefix) { + throw new RuntimeException("Unable to load DOI prefix from " + + "configuration. Cannot find property " + + CFG_PREFIX + "."); + } + return prefix; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldAlternativeDOI.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldAlternativeDOI.java new file mode 100644 index 000000000000..3966566196cb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldAlternativeDOI.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks.virtualfields; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; + + +public class VirtualFieldAlternativeDOI implements VirtualField { + + @Autowired + private ItemDOIService itemDOIService; + + @Override + public String[] getMetadata(Context context, Item item, String fieldName) { + String[] qualifiers = StringUtils.split(fieldName, "."); + if (qualifiers.length != 3) { + throw new IllegalArgumentException("Invalid field name " + fieldName); + } + + return itemDOIService.getAlternativeDOIFromItem(item); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldPrimaryDOI.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldPrimaryDOI.java new file mode 
100644 index 000000000000..3039ded0df84 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldPrimaryDOI.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks.virtualfields; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; + + +public class VirtualFieldPrimaryDOI implements VirtualField { + + @Autowired + private ItemDOIService itemDOIService; + + @Override + public String[] getMetadata(Context context, Item item, String fieldName) { + String[] qualifiers = StringUtils.split(fieldName, "."); + if (qualifiers.length != 3) { + throw new IllegalArgumentException("Invalid field name " + fieldName); + } + + return new String[] {itemDOIService.getPrimaryDOIFromItem(item)}; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldVocabularyI18nValuePair.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldVocabularyI18nValuePair.java new file mode 100644 index 000000000000..89d9181c20bc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/virtualfields/VirtualFieldVocabularyI18nValuePair.java @@ -0,0 +1,193 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks.virtualfields; + +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; + +import 
org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.authority.ChoiceAuthority; +import org.dspace.content.authority.DCInputAuthority; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.core.I18nUtil; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.core.service.PluginService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link VirtualField} that translates {@code value-pair} + * and {@code vocabulary-fields} into displayable labels. + * Internally uses the {@link ChoiceAuthorityService} to translate them. + *
      + *
      + * (Example: {@code @virtual.vocabulary_18n.metadataField@}) + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class VirtualFieldVocabularyI18nValuePair implements VirtualField { + + private final static Logger LOGGER = LoggerFactory.getLogger(VirtualFieldVocabularyI18nValuePair.class); + + @Autowired + private ItemService itemService; + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + + private PluginService pluginService = CoreServiceFactory.getInstance().getPluginService(); + + @Override + public String[] getMetadata(Context context, Item item, String fieldName) { + String[] virtualFieldName = fieldName.split("\\.", 4); + + if (virtualFieldName.length < 3 || virtualFieldName.length > 4) { + LOGGER.warn("Invalid value-pairs virtual field: " + fieldName); + return new String[] {}; + } + String vocabularyName = getVocabularyName(virtualFieldName); + String metadataField = virtualFieldName[2].replaceAll("-", "."); + Locale locale = getLocale(context); + + return itemService.getMetadataByMetadataString(item, metadataField) + .stream() + .map(metadataValue -> + getLabelForVocabulary(vocabularyName, metadataValue, locale) + .orElse(getDisplayableLabel(item, metadataValue, locale.getLanguage())) + ) + .toArray(String[]::new); + } + + protected Optional getLabelForVocabulary( + String vocabularyName, MetadataValue metadataValue, Locale locale + ) { + return Optional.ofNullable(vocabularyName) + .map(vocabulary -> (ChoiceAuthority) pluginService.getNamedPlugin(ChoiceAuthority.class, vocabulary)) + .filter(Objects::nonNull) + .flatMap(choiceAuthority -> Optional.ofNullable(metadataValue.getAuthority()) + .flatMap( + authority -> getLabelWithFallback(choiceAuthority, authority, locale, I18nUtil.getDefaultLocale()) + ) + .or( + () -> getLabelWithFallback( + choiceAuthority, metadataValue.getValue(), + locale, I18nUtil.getDefaultLocale() + ) + ) + ); + } + + private Optional getLabelWithFallback( + ChoiceAuthority 
choiceAuthority, String authKey, Locale locale, Locale fallbackLocale + ) { + return getValidLabel( + Optional.ofNullable(choiceAuthority.getLabel(authKey, locale.getLanguage())) + ) + .or( + () -> getValidLabel( + Optional.ofNullable( + choiceAuthority.getLabel( + authKey, + fallbackLocale.getLanguage() + ) + ) + ) + ); + } + + protected String getDisplayableLabel(Item item, MetadataValue metadataValue, String language) { + return getLabelForCurrentLanguage(item, metadataValue, language) + .or(() -> getLabelForDefaultLanguage(item, metadataValue)) + .orElse(metadataValue.getValue()); + } + + protected Optional getLabelForDefaultLanguage(Item item, MetadataValue metadataValue) { + return getLabelForVocabulary(item, metadataValue, I18nUtil.getDefaultLocale().getLanguage()) + .or(() -> getLabelForValuePair(item, metadataValue, I18nUtil.getDefaultLocale().getLanguage())); + } + + protected Optional getLabelForCurrentLanguage(Item item, MetadataValue metadataValue, String language) { + return getLabelForVocabulary(item, metadataValue, language) + .or(() -> getLabelForValuePair(item, metadataValue, language)); + } + + private Optional getLabelForVocabulary(Item item, MetadataValue metadataValue, String language) { + return getValidLabel( + Optional.ofNullable(metadataValue) + .filter(mv -> StringUtils.isNotBlank(mv.getAuthority())) + .map(mv -> getVocabulary(item, mv, language)) + ); + } + + private Optional getLabelForValuePair(Item item, MetadataValue metadataValue, String language) { + return getValidLabel( + Optional.ofNullable(metadataValue) + .filter(mv -> StringUtils.isNotBlank(mv.getValue())) + .map(mv -> getValuePair(item, mv, language)) + ); + } + + private String getVocabulary(Item item, MetadataValue metadataValue, String language) { + try { + return this.choiceAuthorityService + .getLabel( + metadataValue, item.getType(), + item.getOwningCollection(), language + ); + } catch (Exception e) { + LOGGER.warn("Error while retrieving the vocabulary for: " + + 
metadataValue.getMetadataField().toString(), e + ); + } + return null; + } + + + private String getValuePair(Item item, MetadataValue metadataValue, String language) { + try { + return this.choiceAuthorityService + .getLabel( + metadataValue.getMetadataField().toString(), item.getType(), + item.getOwningCollection(), metadataValue.getValue(), language + ); + } catch (Exception e) { + LOGGER.warn( + "Error while retrievingthe value-pair for: " + + metadataValue.getMetadataField().toString(), + e + ); + } + return null; + } + + private String getVocabularyName(String[] virtualFieldName) { + return Optional.of(virtualFieldName.length) + .filter(l -> l == 4) + .map(l -> virtualFieldName[l - 1]) + .orElse(null); + } + + private Optional getValidLabel(Optional label) { + return label.filter(this::isValidLabel); + } + + private boolean isValidLabel(String s) { + return s != null && !s.contains(DCInputAuthority.UNKNOWN_KEY); + } + + private Locale getLocale(Context context) { + return Optional.ofNullable(context.getCurrentLocale()) + .orElse(I18nUtil.getDefaultLocale()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/InCollectionFilter.java b/dspace-api/src/main/java/org/dspace/content/logic/InCollectionFilter.java new file mode 100644 index 000000000000..c7697ce82fa1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/InCollectionFilter.java @@ -0,0 +1,124 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic; + +import java.sql.SQLException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import 
org.dspace.content.service.CollectionService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.handle.service.HandleService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * A condition that accepts a list of collection handles and returns true + * if the item belongs to any of them. + * + * @author Kim Shepherd + * @author Giuseppe Digilio + */ +public class InCollectionFilter implements Filter { + + @Autowired(required = true) + protected ItemService itemService; + @Autowired(required = true) + protected CollectionService collectionService; + @Autowired(required = true) + protected HandleService handleService; + + private String name; + private Map parameters = new HashMap<>(); + private static Logger log = LogManager.getLogger(InCollectionFilter.class); + + /** + * Get parameters set by spring configuration in item-filters.xml + * These could be any kind of map that the extending condition class needs for evaluation + * @return map of parameters + * @throws LogicalStatementException + */ + public Map getParameters() throws LogicalStatementException { + return this.parameters; + } + + /** + * Set parameters - used by Spring when creating beans from item-filters.xml + * These could be any kind of map that the extending condition class needs for evaluation + * @param parameters + * @throws LogicalStatementException + */ + @Autowired(required = true) + public void setParameters(Map parameters) throws LogicalStatementException { + this.parameters = parameters; + } + + /** + * Return true if item is in one of the specified collections + * Return false if not + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + @Override + public Boolean getResult(Context context, Item item) throws LogicalStatementException { + + List collectionHandles = (List)getParameters().get("collections"); + List 
itemCollections = item.getCollections(); + for (Collection collection : itemCollections) { + if (collectionHandles.contains(collection.getHandle())) { + log.debug("item " + item.getHandle() + " is in collection " + + collection.getHandle() + ", returning true"); + return true; + } + } + + // Look for the parent object of the item. This is important as the item.getOwningCollection method + // may return null, even though the item itself does have a parent object, at the point of archival + try { + DSpaceObject parent = itemService.getParentObject(context, item); + if (parent != null) { + log.debug("Got parent DSO for item: " + parent.getID().toString()); + log.debug("Parent DSO handle: " + parent.getHandle()); + if (collectionHandles.contains(parent.getHandle())) { + log.debug("item " + item.getHandle() + " is in collection " + + parent.getHandle() + ", returning true"); + return true; + } + } else { + log.debug("Parent DSO is null..."); + } + } catch (SQLException e) { + log.error("Error obtaining parent DSO", e); + throw new LogicalStatementException(e); + } + + // If we reach this statement, the item did not appear in any of the collections from the parameters + log.debug("item " + item.getHandle() + " not found in the passed collection handle list"); + + return false; + } + + @Override + public void setBeanName(String name) { + log.debug("Initialize bean " + name); + this.name = name; + } + + @Override + public String getName() { + return name; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/security/AccessItemMode.java b/dspace-api/src/main/java/org/dspace/content/security/AccessItemMode.java index 2aee66fed1ff..e2954bf8f83c 100644 --- a/dspace-api/src/main/java/org/dspace/content/security/AccessItemMode.java +++ b/dspace-api/src/main/java/org/dspace/content/security/AccessItemMode.java @@ -9,6 +9,8 @@ import java.util.List; +import org.dspace.content.logic.Filter; + /** * Interface to be extended for the configuration related to access item 
modes. * @@ -50,4 +52,6 @@ public interface AccessItemMode { * @return the group list */ public List getGroups(); + + public Filter getAdditionalFilter(); } diff --git a/dspace-api/src/main/java/org/dspace/content/security/CrisSecurity.java b/dspace-api/src/main/java/org/dspace/content/security/CrisSecurity.java index 3fcd83864175..9a472b8a40c3 100644 --- a/dspace-api/src/main/java/org/dspace/content/security/CrisSecurity.java +++ b/dspace-api/src/main/java/org/dspace/content/security/CrisSecurity.java @@ -23,6 +23,7 @@ public enum CrisSecurity { ITEM_ADMIN, SUBMITTER, SUBMITTER_GROUP, - GROUP; + GROUP, + ALL; } diff --git a/dspace-api/src/main/java/org/dspace/content/security/CrisSecurityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/security/CrisSecurityServiceImpl.java index 4a8b2c313846..99add81e862b 100644 --- a/dspace-api/src/main/java/org/dspace/content/security/CrisSecurityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/security/CrisSecurityServiceImpl.java @@ -11,6 +11,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.UUID; import org.apache.commons.collections.CollectionUtils; @@ -55,37 +56,46 @@ public boolean hasAccess(Context context, Item item, EPerson user, AccessItemMod .anyMatch(security -> hasAccess(context, item, user, accessMode, security)); } - private boolean hasAccess(Context context, Item item, EPerson user, AccessItemMode accessMode, - CrisSecurity crisSecurity) { - + private boolean hasAccess( + Context context, Item item, EPerson user, AccessItemMode accessMode, CrisSecurity crisSecurity + ) { try { + final boolean checkSecurity = checkSecurity(context, item, user, accessMode, crisSecurity); - switch (crisSecurity) { - case ADMIN: - return authorizeService.isAdmin(context, user); - case CUSTOM: - return hasAccessByCustomPolicy(context, item, user, accessMode); - case GROUP: - return hasAccessByGroup(context, user, 
accessMode.getGroups()); - case ITEM_ADMIN: - return authorizeService.isAdmin(context, user, item); - case OWNER: - return isOwner(user, item); - case SUBMITTER: - return user != null && user.equals(item.getSubmitter()); - case SUBMITTER_GROUP: - return isUserInSubmitterGroup(context, item, user); - case NONE: - default: - return false; - } - + return Optional.ofNullable(accessMode.getAdditionalFilter()) + .map(filter -> checkSecurity && filter.getResult(context, item)) + .orElse(checkSecurity); } catch (SQLException e) { - throw new RuntimeException(e); + throw new SQLRuntimeException(e); } } + private boolean checkSecurity(Context context, Item item, EPerson user, AccessItemMode accessMode, + CrisSecurity crisSecurity) throws SQLException { + switch (crisSecurity) { + case ADMIN: + return authorizeService.isAdmin(context, user); + case CUSTOM: + return hasAccessByCustomPolicy(context, item, user, accessMode); + case GROUP: + return hasAccessByGroup(context, user, accessMode.getGroups()); + case ITEM_ADMIN: + return authorizeService.isAdmin(context, user, item); + case OWNER: + return isOwner(user, item); + case SUBMITTER: + return user != null && user.equals(item.getSubmitter()); + case SUBMITTER_GROUP: + return isUserInSubmitterGroup(context, item, user); + case ALL: + return true; + case NONE: + default: + return false; + } + } + private boolean isOwner(EPerson eperson, Item item) { return ePersonService.isOwnerOfItem(eperson, item); } diff --git a/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java index 7ea41807c11f..603ddb228f25 100644 --- a/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java @@ -145,6 +145,17 @@ private List getPermissionFilteredMetadata(Context context, Item } + private boolean 
canEditItem(Context context, Item item) { + if (context == null) { + return false; + } + try { + return this.itemService.canEdit(context, item); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + private List findBoxes(Context context, Item item, boolean preventBoxSecurityCheck) { if (context == null || preventBoxSecurityCheck) { // the context could be null if the converter is used to prepare test data or in a batch script @@ -169,7 +180,11 @@ private boolean isMetadataFieldVisible(Context context, List boxe if (CollectionUtils.isNotEmpty(boxes)) { return isMetadataFieldVisibleByBoxes(context, boxes, item, metadataField, preventBoxSecurityCheck); } - return isNotAdmin(context) ? isNotHidden(context, metadataField) : true; + return isNotAdmin(context) ? isMetadataFieldVisibleFor(context, item, metadataField) : true; + } + + private boolean isMetadataFieldVisibleFor(Context context, Item item, MetadataField metadataField) { + return canEditItem(context, item) || isNotHidden(context, metadataField); } private boolean isMetadataValueReturnAllowed(Context context, Item item, MetadataValue metadataValue) { @@ -210,7 +225,7 @@ private boolean isMetadataFieldVisibleByBoxes(Context context, List dcInputsSet(final String sd) { } } + private boolean isAdmin(Context context) { + return !isNotAdmin(context); + } + private boolean isNotAdmin(Context context) { try { return context == null || !authorizeService.isAdmin(context); diff --git a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java index 3f5b17630a27..fa1cbc38beae 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java @@ -22,6 +22,7 @@ import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; import 
org.dspace.content.Item; import org.dspace.core.Context; @@ -209,6 +210,8 @@ public InputStream retrieve(Context context, Bitstream bitstream) public Bitstream getBitstreamByName(Item item, String bundleName, String bitstreamName) throws SQLException; + List getBitstreamByBundleName(Item item, String bundleName) throws SQLException; + public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLException; public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException; @@ -243,4 +246,8 @@ List findShowableByItem(Context context, UUID itemId, String bundleNa List findByItemAndBundleAndMetadata(Context context, Item item, String bundleName, Map filterMetadata); + boolean isOriginalBitstream(DSpaceObject dso) throws SQLException; + + void updateThumbnailResourcePolicies(Context context, Bitstream bitstream) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index 6ce376908bf7..f06208e2d151 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -15,6 +15,7 @@ import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -534,4 +535,27 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu */ public String getEntityType(Collection collection); + /** + * Returns a list of all collections for a specific entity type. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." 
+ * + * @param context DSpace Context + * @param entityType limit the returned collection to those related to given entity type + * @return list of collections found + * @throws SearchServiceException if search error + */ + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException; + + /** + * Returns total collection archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ + int countArchivedItems(Collection collection) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java index e7b62126650c..c089bcec8df1 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java @@ -14,6 +14,7 @@ import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -292,4 +293,13 @@ public void removeSubcommunity(Context context, Community parentCommunity, Commu public List findAuthorizedGroupMapped(Context context, List actions) throws SQLException; int countTotal(Context context) throws SQLException; + + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ + int countArchivedItems(Community community) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java index 968768681821..aa2911edfb7c 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java +++ 
b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java @@ -584,4 +584,6 @@ default void addAndShiftRightSecuredMetadata(Context context, T dso, String sche throws SQLException { } + + boolean exists(Context context, UUID id) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java b/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java index 67ac2e20499c..d00c62cc91d8 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java @@ -83,4 +83,15 @@ public Item restoreItem(Context c, InProgressSubmission is, public String getBitstreamProvenanceMessage(Context context, Item myitem) throws SQLException; + /** + * Generate provenance description of direct item submission (not through workflow). + * + * @param context context + * @param item the item to generate description for + * @return provenance description + * @throws SQLException if database error + */ + public String getSubmittedByProvenanceMessage(Context context, Item item) + throws SQLException;; + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index e6823690743d..5883006a9180 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -474,7 +474,7 @@ public void replaceAllBitstreamPolicies(Context context, Item item, List findByLastModifiedSince(Context context, Date last) int countWithdrawnItems(Context context) throws SQLException; /** - * finds all items for which the current user has editing rights - * @param context DSpace context object - * @param offset page offset - * @param limit page size limit - * @return list of items for which the current user has editing rights - * @throws 
SQLException - * @throws SearchServiceException - */ + * finds all items for which the current user has editing rights + * @param context DSpace context object + * @param offset page offset + * @param limit page size limit + * @return list of items for which the current user has editing rights + * @throws SQLException + * @throws SearchServiceException + */ public List findItemsWithEdit(Context context, int offset, int limit) throws SQLException, SearchServiceException; /** - * counts all items for which the current user has editing rights - * @param context DSpace context object - * @return list of items for which the current user has editing rights - * @throws SQLException - * @throws SearchServiceException - */ + * counts all items for which the current user has editing rights + * @param context DSpace context object + * @return list of items for which the current user has editing rights + * @throws SQLException + * @throws SearchServiceException + */ public int countItemsWithEdit(Context context) throws SQLException, SearchServiceException; /** @@ -922,4 +1027,17 @@ public Iterator findRelatedItemsByAuthorityControlledFields(Context contex */ public boolean isLatestVersion(Context context, Item item) throws SQLException; + /** + * Adds a resource policy to the specified item for the given action and EPerson. 
+ * + * @param context the DSpace context + * @param item the item to add the policy to + * @param actionID the ID of the action to add the policy for + * @param eperson the EPerson to add the policy for + * @throws SQLException if a database error occurs + * @throws AuthorizeException if the current user is not authorized to perform this action + */ + void addResourcePolicy(Context context, Item item, int actionID, EPerson eperson) + throws SQLException, AuthorizeException; + } diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index 32ad747d765e..c18f256d9564 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -7,16 +7,23 @@ */ package org.dspace.core; +import java.lang.reflect.Field; import java.sql.SQLException; +import java.util.Arrays; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.UUID; import java.util.stream.Stream; +import javax.persistence.Column; +import javax.persistence.Id; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Expression; +import javax.persistence.criteria.Path; import javax.persistence.criteria.Root; import com.google.common.collect.AbstractIterator; @@ -95,6 +102,44 @@ public T findByID(Context context, Class clazz, UUID id) throws SQLException { return result; } + public static List getAllFields(List fields, Class type) { + fields.addAll(Arrays.asList(type.getDeclaredFields())); + + if (type.getSuperclass() != null) { + getAllFields(fields, type.getSuperclass()); + } + + return fields; + } + + @Override + public boolean exists(Context context, Class clazz, UUID id) throws SQLException { + if (id == null) { + return false; + } 
+ Optional optionalField = + getAllFields(new LinkedList<>(), clazz) + .stream() + .filter(field -> field.isAnnotationPresent(Id.class) && field.isAnnotationPresent(Column.class)) + .findFirst(); + if (optionalField.isEmpty()) { + return false; + } + + Field idField = optionalField.get(); + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, clazz); + + Root root = criteriaQuery.from(clazz); + Path idColumn = root.get(idField.getName()); + criteriaQuery.select(idColumn); + criteriaQuery.where(criteriaBuilder.equal(idColumn, id)); + + org.hibernate.query.Query query = getHibernateSession(context).createQuery(criteriaQuery); + query.setMaxResults(1); + return query.uniqueResult() != null; + } + @Override public T findByID(Context context, Class clazz, int id) throws SQLException { @SuppressWarnings("unchecked") diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java index e6535f094152..e9c6b95b7f05 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java @@ -83,13 +83,14 @@ protected void addMetadataValueWhereQuery(StringBuilder query, List groups) { } } + public Set getCachedAllMemberGroupsSet(EPerson ePerson) { if (isReadOnly()) { return readOnlyCache.getCachedAllMemberGroupsSet(ePerson); @@ -970,4 +985,15 @@ public void setAuthenticationMethod(final String authenticationMethod) { public boolean isContextUserSwitched() { return currentUserPreviousState != null; } + + /** + * Returns the default "Administrator" group for DSpace administrators. + * The result is cached in the 'adminGroup' field, so it is only looked up once. + * This is done to improve performance, as this method is called quite often. + */ + public Group getAdminGroup() throws SQLException { + return (adminGroup == null) ? 
EPersonServiceFactory.getInstance() + .getGroupService() + .findByName(this, Group.ADMIN) : adminGroup; + } } diff --git a/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java b/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java index 9a6e5bfc0706..e4fdb4c7c1d4 100644 --- a/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java +++ b/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java @@ -95,4 +95,8 @@ private Pair buildGroupMembershipKey(Group group, EPerson eperso eperson == null ? "" : eperson.getID().toString()); } + protected void clearAllMembershipGroupCache(EPerson eperson) { + allMemberGroupsCache.remove(buildAllMembersGroupKey(eperson)); + } + } diff --git a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java index 18def2d10316..1b4d1c9dc5c5 100644 --- a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java +++ b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java @@ -21,7 +21,7 @@ public class CrisConstants { * same number than the parent leading metadata */ public static final String PLACEHOLDER_PARENT_METADATA_VALUE = "#PLACEHOLDER_PARENT_METADATA_VALUE#"; - + public static final String DSPACE_BASE_VERSION = "DSpace 7.6.1"; public static final MetadataFieldName MD_ENTITY_TYPE = new MetadataFieldName("dspace", "entity", "type"); public static final MetadataFieldName MD_SUBMISSION_TYPE = new MetadataFieldName("cris", "submission", "definition"); diff --git a/dspace-api/src/main/java/org/dspace/core/DBConnection.java b/dspace-api/src/main/java/org/dspace/core/DBConnection.java index ebbb412153a4..128c35871684 100644 --- a/dspace-api/src/main/java/org/dspace/core/DBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/DBConnection.java @@ -155,4 +155,12 @@ public interface DBConnection { * @throws java.sql.SQLException passed through. 
*/ public void uncacheEntity(E entity) throws SQLException; + + /** + * Do a manual flush. This synchronizes the in-memory state of the Session + * with the database (write changes to the database) + * + * @throws SQLException passed through. + */ + public void flushSession() throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java index 67567ce97f6f..8cbfd6e1deac 100644 --- a/dspace-api/src/main/java/org/dspace/core/Email.java +++ b/dspace-api/src/main/java/org/dspace/core/Email.java @@ -41,7 +41,6 @@ import javax.mail.internet.MimeMultipart; import javax.mail.internet.ParseException; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.velocity.Template; @@ -57,26 +56,40 @@ import org.dspace.services.factory.DSpaceServicesFactory; /** - * Class representing an e-mail message, also used to send e-mails. + * Class representing an e-mail message. The {@link send} method causes the + * assembled message to be formatted and sent. *

      * Typical use: - *

      + *
      + * Email email = Email.getEmail(path);
      + * email.addRecipient("foo@bar.com");
      + * email.addArgument("John");
      + * email.addArgument("On the Testing of DSpace");
      + * email.send();
      + * 
      + * {@code path} is the filesystem path of an email template, typically in + * {@code ${dspace.dir}/config/emails/} and can include the subject -- see + * below. Templates are processed by
      + * Apache Velocity. They may contain VTL directives and property + * placeholders. + *

      + * {@link addArgument(string)} adds a property to the {@code params} array + * in the Velocity context, which can be used to replace placeholder tokens + * in the message. These arguments are indexed by number in the order they were + * added to the message. + *

      + * The DSpace configuration properties are also available to templates as the + * array {@code config}, indexed by name. Example: {@code ${config.get('dspace.name')}} + *

      + * Recipients and attachments may be added as needed. See {@link addRecipient}, + * {@link addAttachment(File, String)}, and + * {@link addAttachment(InputStream, String, String)}. *

      - * Email email = new Email();
      - * email.addRecipient("foo@bar.com");
      - * email.addArgument("John");
      - * email.addArgument("On the Testing of DSpace");
      - * email.send();
      - *

      + * Headers such as Subject may be supplied by the template, by defining them + * using the VTL directive {@code #set()}. Only headers named in the DSpace + * configuration array property {@code mail.message.headers} will be added. *

      - * name is the name of an email template in - * dspace-dir/config/emails/ (which also includes the subject.) - * arg0 and arg1 are arguments to fill out the - * message with. - *

      - * Emails are formatted using Apache Velocity. Headers such as Subject may be - * supplied by the template, by defining them using #set(). Example: - *

      + * Example: * *
        *
      @@ -91,12 +104,14 @@
        *
        *     Thank you for sending us your submission "${params[1]}".
        *
      + *     --
      + *     The ${config.get('dspace.name')} Team
      + *
        * 
      * *

      * If the example code above was used to send this mail, the resulting mail * would have the subject Example e-mail and the body would be: - *

      * *
        *
      @@ -105,7 +120,16 @@
        *
        *     Thank you for sending us your submission "On the Testing of DSpace".
        *
      + *     --
      + *     The DSpace Team
      + *
        * 
      + *

      + * There are two ways to load a message body. One can create an instance of + * {@link Email} and call {@link setContent} on it, passing the body as a String. Or + * one can use the static factory method {@link getEmail} to load a file by its + * complete filesystem path. In either case the text will be loaded into a + * Velocity template. * * @author Robert Tansley * @author Jim Downing - added attachment handling code @@ -115,7 +139,6 @@ public class Email { /** * The content of the message */ - private String content; private String contentName; /** @@ -182,13 +205,12 @@ public Email() { moreAttachments = new ArrayList<>(10); subject = ""; template = null; - content = ""; replyTo = null; charset = null; } /** - * Add a recipient + * Add a recipient. * * @param email the recipient's email address */ @@ -211,16 +233,24 @@ public void addCcAddress(String email) { * "Subject:" line must be stripped. * * @param name a name for this message body - * @param cnt the content of the message + * @param content the content of the message */ - public void setContent(String name, String cnt) { - content = cnt; + public void setContent(String name, String content) { contentName = name; arguments.clear(); + + VelocityEngine templateEngine = new VelocityEngine(); + templateEngine.init(VELOCITY_PROPERTIES); + + StringResourceRepository repo = (StringResourceRepository) + templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME); + repo.putStringResource(contentName, content); + // Turn content into a template. + template = templateEngine.getTemplate(contentName); } /** - * Set the subject of the message + * Set the subject of the message. * * @param s the subject of the message */ @@ -229,7 +259,7 @@ public void setSubject(String s) { } /** - * Set the reply-to email address + * Set the reply-to email address. 
* * @param email the reply-to email address */ @@ -238,7 +268,7 @@ public void setReplyTo(String email) { } /** - * Fill out the next argument in the template + * Fill out the next argument in the template. * * @param arg the value for the next argument */ @@ -246,6 +276,13 @@ public void addArgument(Object arg) { arguments.add(arg); } + /** + * Add an attachment bodypart to the message from an external file. + * + * @param f reference to a file to be attached. + * @param name a name for the resulting bodypart in the message's MIME + * structure. + */ public void addAttachment(File f, String name) { attachments.add(new FileAttachment(f, name)); } @@ -253,6 +290,17 @@ public void addAttachment(File f, String name) { /** When given a bad MIME type for an attachment, use this instead. */ private static final String DEFAULT_ATTACHMENT_TYPE = "application/octet-stream"; + /** + * Add an attachment bodypart to the message from a byte stream. + * + * @param is the content of this stream will become the content of the + * bodypart. + * @param name a name for the resulting bodypart in the message's MIME + * structure. + * @param mimetype the MIME type of the resulting bodypart, such as + * "text/pdf". If {@code null} it will default to + * "application/octet-stream", which is MIME for "unknown format". + */ public void addAttachment(InputStream is, String name, String mimetype) { if (null == mimetype) { LOG.error("Null MIME type replaced with '" + DEFAULT_ATTACHMENT_TYPE @@ -272,6 +320,11 @@ public void addAttachment(InputStream is, String name, String mimetype) { moreAttachments.add(new InputStreamAttachment(is, name, mimetype)); } + /** + * Set the character set of the message. + * + * @param cs the name of a character set, such as "UTF-8" or "EUC-JP". + */ public void setCharset(String cs) { charset = cs; } @@ -296,15 +349,20 @@ public void reset() { * {@code mail.message.headers} then that name and its value will be added * to the message's headers. * - *

      "subject" is treated specially: if {@link setSubject()} has not been called, - * the value of any "subject" property will be used as if setSubject had - * been called with that value. Thus a template may define its subject, but - * the caller may override it. + *

      "subject" is treated specially: if {@link setSubject()} has not been + * called, the value of any "subject" property will be used as if setSubject + * had been called with that value. Thus a template may define its subject, + * but the caller may override it. * * @throws MessagingException if there was a problem sending the mail. * @throws IOException if IO error */ public void send() throws MessagingException, IOException { + if (null == template) { + // No template -- no content -- PANIC!!! + throw new MessagingException("Email has no body"); + } + ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -342,27 +400,10 @@ public void send() throws MessagingException, IOException { String[] templateHeaders = config.getArrayProperty("mail.message.headers"); // Format the mail message body - VelocityEngine templateEngine = new VelocityEngine(); - templateEngine.init(VELOCITY_PROPERTIES); - VelocityContext vctx = new VelocityContext(); vctx.put("config", new UnmodifiableConfigurationService(config)); vctx.put("params", Collections.unmodifiableList(arguments)); - if (null == template) { - if (StringUtils.isBlank(content)) { - // No template and no content -- PANIC!!! - throw new MessagingException("Email has no body"); - } - // No template, so use a String of content. - StringResourceRepository repo = (StringResourceRepository) - templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME); - repo.putStringResource(contentName, content); - // Turn content into a template. 
- template = templateEngine.getTemplate(contentName); - templateHeaders = new String[] {"subject"}; - } - StringWriter writer = new StringWriter(); try { template.merge(vctx, writer); @@ -446,7 +487,8 @@ public void send() throws MessagingException, IOException { // add the stream messageBodyPart = new MimeBodyPart(); messageBodyPart.setDataHandler(new DataHandler( - new InputStreamDataSource(attachment.name,attachment.mimetype,attachment.is))); + new InputStreamDataSource(attachment.name, + attachment.mimetype, attachment.is))); messageBodyPart.setFileName(attachment.name); multipart.addBodyPart(messageBodyPart); } @@ -496,6 +538,9 @@ public void send() throws MessagingException, IOException { /** * Get the VTL template for an email message. The message is suitable * for inserting values using Apache Velocity. + *

      + * Note that everything is stored here, so that only send() throws a + * MessagingException. * * @param emailFile * full name for the email template, for example "/dspace/config/emails/register". @@ -533,15 +578,6 @@ public static Email getEmail(String emailFile) } return email; } - /* - * Implementation note: It might be necessary to add a quick utility method - * like "send(to, subject, message)". We'll see how far we get without it - - * having all emails as templates in the config allows customisation and - * internationalisation. - * - * Note that everything is stored and the run in send() so that only send() - * throws a MessagingException. - */ /** * Test method to send an email to check email server settings @@ -596,7 +632,7 @@ public static void main(String[] args) { } /** - * Utility struct class for handling file attachments. + * Utility record class for handling file attachments. * * @author ojd20 */ @@ -612,7 +648,7 @@ public FileAttachment(File f, String n) { } /** - * Utility struct class for handling file attachments. + * Utility record class for handling file attachments. * * @author Adán Román Ruiz at arvo.es */ @@ -629,6 +665,8 @@ public InputStreamAttachment(InputStream is, String name, String mimetype) { } /** + * Wrap an {@link InputStream} in a {@link DataSource}. + * * @author arnaldo */ public static class InputStreamDataSource implements DataSource { @@ -636,6 +674,14 @@ public static class InputStreamDataSource implements DataSource { private final String contentType; private final ByteArrayOutputStream baos; + /** + * Consume the content of an InputStream and store it in a local buffer. + * + * @param name give the DataSource a name. + * @param contentType the DataSource contains this type of data. + * @param inputStream content to be buffered in the DataSource. + * @throws IOException if the stream cannot be read. 
+ */ InputStreamDataSource(String name, String contentType, InputStream inputStream) throws IOException { this.name = name; this.contentType = contentType; diff --git a/dspace-api/src/main/java/org/dspace/core/GenericDAO.java b/dspace-api/src/main/java/org/dspace/core/GenericDAO.java index a04a0ccbdcc8..bba0281b119c 100644 --- a/dspace-api/src/main/java/org/dspace/core/GenericDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/GenericDAO.java @@ -91,6 +91,17 @@ public interface GenericDAO { */ public T findByID(Context context, Class clazz, int id) throws SQLException; + /** + * Checks if a given id of a target entity with the clazz type exists in the database. + * + * @param context current DSpace context + * @param clazz entity class + * @param id identifier of the entity + * @return true if found, false otherwise + * @throws SQLException + */ + boolean exists(Context context, Class clazz, UUID id) throws SQLException; + /** * Fetch the entity identified by its UUID primary key. * diff --git a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java index 4fdb29588066..858149a64f31 100644 --- a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java @@ -342,4 +342,17 @@ public void uncacheEntity(E entity) throws SQLExcep } } } + + /** + * Do a manual flush. This synchronizes the in-memory state of the Session + * with the database (write changes to the database) + * + * @throws SQLException passed through. 
+ */ + @Override + public void flushSession() throws SQLException { + if (getSession().isDirty()) { + getSession().flush(); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java index 8324105a3085..d895f9a76481 100644 --- a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java @@ -17,9 +17,12 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; +import javax.servlet.http.HttpServletRequest; import org.dspace.core.service.LicenseService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.services.model.Request; +import org.dspace.web.ContextUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,13 +104,14 @@ public String getLicenseText(String licenseFile) { /** * Get the site-wide default license that submitters need to grant * + * Localized license requires: default_{{locale}}.license file. + * Locale also must be listed in webui.supported.locales setting. + * * @return the default license */ @Override public String getDefaultSubmissionLicense() { - if (null == license) { - init(); - } + init(); return license; } @@ -115,9 +119,8 @@ public String getDefaultSubmissionLicense() { * Load in the default license. */ protected void init() { - File licenseFile = new File( - DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir") - + File.separator + "config" + File.separator + "default.license"); + Context context = obtainContext(); + File licenseFile = new File(I18nUtil.getDefaultLicense(context)); FileInputStream fir = null; InputStreamReader ir = null; @@ -169,4 +172,24 @@ protected void init() { } } } + + /** + * Obtaining current request context. + * Return new context if getting one from current request failed. 
+ * + * @return DSpace context object + */ + private Context obtainContext() { + try { + Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest(); + if (currentRequest != null) { + HttpServletRequest request = currentRequest.getHttpServletRequest(); + return ContextUtil.obtainContext(request); + } + } catch (Exception e) { + log.error("Can't load current request context."); + } + + return new Context(); + } } diff --git a/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java new file mode 100644 index 000000000000..7cd2616ff6e7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java @@ -0,0 +1,64 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; +import java.util.UUID; + +import com.google.common.collect.AbstractIterator; +import org.dspace.content.DSpaceObject; +import org.dspace.core.exception.SQLRuntimeException; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Iterator implementation which allows to iterate over items and commit while + * iterating. 
Using a list of UUID the iterator doesn't get invalidated after a + * commit + * + * @author stefano.maffei at 4science.com + * @param class type + */ +public class UUIDIterator extends AbstractIterator { + private Class clazz; + + private Iterator iterator; + + @Autowired + private AbstractHibernateDSODAO dao; + + private Context ctx; + + public UUIDIterator(Context ctx, List uuids, Class clazz, AbstractHibernateDSODAO dao) + throws SQLException { + this.ctx = ctx; + this.clazz = clazz; + this.dao = dao; + this.iterator = uuids.iterator(); + } + + @Override + protected T computeNext() { + try { + if (iterator.hasNext()) { + T item = dao.findByID(ctx, clazz, iterator.next()); + if (item != null) { + return item; + } else { + return computeNext(); + } + } else { + return endOfData(); + } + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/core/Utils.java b/dspace-api/src/main/java/org/dspace/core/Utils.java index b9fff20c7674..ea9ed57eca04 100644 --- a/dspace-api/src/main/java/org/dspace/core/Utils.java +++ b/dspace-api/src/main/java/org/dspace/core/Utils.java @@ -16,8 +16,6 @@ import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; -import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; @@ -415,7 +413,9 @@ public static String[] tokenize(String metadata) { * @return metadata field key */ public static String standardize(String schema, String element, String qualifier, String separator) { - if (StringUtils.isBlank(qualifier)) { + if (StringUtils.isBlank(element)) { + return null; + } else if (StringUtils.isBlank(qualifier)) { return schema + separator + element; } else { return schema + separator + element + separator + qualifier; @@ -447,14 +447,14 @@ public static String getBaseUrl(String urlString) { */ public static String getHostName(String 
uriString) { try { - URI uri = new URI(uriString); - String hostname = uri.getHost(); + URL url = new URL(uriString); + String hostname = url.getHost(); // remove the "www." from hostname, if it exists if (hostname != null) { return hostname.startsWith("www.") ? hostname.substring(4) : hostname; } return null; - } catch (URISyntaxException e) { + } catch (MalformedURLException e) { return null; } } diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java index f7ab18c01e54..5891fa017cb0 100644 --- a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java @@ -207,9 +207,10 @@ public void init(Curator curator, String taskId) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); try { - // disallow DTD parsing to ensure no XXE attacks can occur. + // disallow DTD parsing to ensure no XXE attacks can occur // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + factory.setXIncludeAware(false); docBuilder = factory.newDocumentBuilder(); } catch (ParserConfigurationException pcE) { log.error("caught exception: " + pcE); diff --git a/dspace-api/src/main/java/org/dspace/curate/Curation.java b/dspace-api/src/main/java/org/dspace/curate/Curation.java index b3af072a32cd..832cb302df0d 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curation.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curation.java @@ -152,17 +152,10 @@ private long runQueue(TaskQueue queue, Curator curator) throws SQLException, Aut super.handler.logInfo("Curating id: " + entry.getObjectId()); } curator.clear(); - // does entry relate to a DSO or workflow object? 
- if (entry.getObjectId().indexOf('/') > 0) { - for (String taskName : entry.getTaskNames()) { - curator.addTask(taskName); - } - curator.curate(context, entry.getObjectId()); - } else { - // TODO: Remove this exception once curation tasks are supported by configurable workflow - // e.g. see https://github.com/DSpace/DSpace/pull/3157 - throw new IllegalArgumentException("curation for workflow items is no longer supported"); + for (String taskName : entry.getTaskNames()) { + curator.addTask(taskName); } + curator.curate(context, entry.getObjectId()); } queue.release(this.queue, ticket, true); return ticket; @@ -175,7 +168,9 @@ private long runQueue(TaskQueue queue, Curator curator) throws SQLException, Aut * @throws SQLException If DSpace contextx can't complete */ private void endScript(long timeRun) throws SQLException { - context.complete(); + if (context.isValid()) { + context.complete(); + } if (verbose) { long elapsed = System.currentTimeMillis() - timeRun; this.handler.logInfo("Ending curation. 
Elapsed time: " + elapsed); diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java index fefb4eb768ea..2587e6b0251e 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -8,12 +8,15 @@ package org.dspace.curate; import java.sql.SQLException; +import java.util.List; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link Curation} script @@ -22,9 +25,6 @@ */ public class CurationScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,16 +38,37 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { } /** - * Only admin can run Curation script via the scripts and processes endpoints. - * @param context The relevant DSpace context - * @return True if currentUser is admin, otherwise false + * Only repository admins or admins of the target object can run Curation script via the scripts + * and processes endpoints. 
+ * + * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise + * @return true if the currentUser is allowed to run the script with the specified parameters or + * at least in some case if the parameters are not yet known */ @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { try { - return authorizeService.isAdmin(context); + if (commandLineParameters == null) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else if (commandLineParameters.stream() + .map(DSpaceCommandLineParameter::getName) + .noneMatch("-i"::equals)) { + return authorizeService.isAdmin(context); + } else { + String dspaceObjectID = commandLineParameters.stream() + .filter(parameter -> "-i".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .findFirst() + .get(); + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID); + return authorizeService.isAdmin(context, dso); + } } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + throw new RuntimeException(e); } } diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index 4076fab51989..670c9bcbe43f 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -88,7 +88,7 @@ public static enum TxScope { protected Appendable reporter = null; protected Invoked iMode = null; protected TaskResolver resolver = new TaskResolver(); - protected TxScope txScope = TxScope.OPEN; + protected TxScope txScope = TxScope.OBJECT; 
protected CommunityService communityService; protected ItemService itemService; protected HandleService handleService; @@ -258,7 +258,7 @@ public void curate(Context c, String id) throws IOException { } // if curation scoped, commit transaction if (txScope.equals(TxScope.CURATION)) { - Context ctx = curationCtx.get(); + Context ctx = curationContext(); if (ctx != null) { ctx.complete(); } @@ -275,8 +275,9 @@ public void curate(Context c, String id) throws IOException { * (Community, Collection or Item). * @param dso the DSpace object * @throws IOException if IO error + * @throws SQLException */ - public void curate(DSpaceObject dso) throws IOException { + public void curate(DSpaceObject dso) throws IOException, SQLException { if (dso == null) { throw new IOException("Cannot perform curation task(s) on a null DSpaceObject!"); } @@ -307,9 +308,10 @@ public void curate(DSpaceObject dso) throws IOException { * @param c session context in which curation takes place. * @param dso the single object to be curated. * @throws java.io.IOException passed through. 
+ * @throws SQLException */ public void curate(Context c, DSpaceObject dso) - throws IOException { + throws IOException, SQLException { curationCtx.set(c); curate(dso); } @@ -462,6 +464,8 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { //Then, perform this task for all Top-Level Communities in the Site // (this will recursively perform task for all objects in DSpace) for (Community subcomm : communityService.findAllTop(ctx)) { + // force a reload of the community in case a commit was performed + subcomm = ctx.reloadEntity(subcomm); if (!doCommunity(tr, subcomm)) { return false; } @@ -480,21 +484,29 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { * @param comm Community * @return true if successful, false otherwise * @throws IOException if IO error + * @throws SQLException */ - protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException { + protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException, SQLException { if (!tr.run(comm)) { return false; } + Context context = curationContext(); + // force a reload in case we are committing after each object + comm = context.reloadEntity(comm); for (Community subcomm : comm.getSubcommunities()) { if (!doCommunity(tr, subcomm)) { return false; } } + // force a reload in case we are committing after each object + comm = context.reloadEntity(comm); for (Collection coll : comm.getCollections()) { + context.reloadEntity(coll); if (!doCollection(tr, coll)) { return false; } } + context.uncacheEntity(comm); return true; } @@ -521,6 +533,7 @@ protected boolean doCollection(TaskRunner tr, Collection coll) throws IOExceptio return false; } } + context.uncacheEntity(coll); } catch (SQLException sqlE) { throw new IOException(sqlE.getMessage(), sqlE); } @@ -533,13 +546,12 @@ protected boolean doCollection(TaskRunner tr, Collection coll) throws IOExceptio * * @param dso the DSpace object * @throws IOException A general class of exceptions 
produced by failed or interrupted I/O operations. + * @throws SQLException */ - protected void visit(DSpaceObject dso) throws IOException { - Context curCtx = curationCtx.get(); - if (curCtx != null) { - if (txScope.equals(TxScope.OBJECT)) { - curCtx.dispatchEvents(); - } + protected void visit(DSpaceObject dso) throws IOException, SQLException { + Context curCtx = curationContext(); + if (curCtx != null && txScope.equals(TxScope.OBJECT)) { + curCtx.commit(); } } @@ -552,7 +564,7 @@ public TaskRunner(ResolvedTask task) { this.task = task; } - public boolean run(DSpaceObject dso) throws IOException { + public boolean run(DSpaceObject dso) throws IOException, SQLException { try { if (dso == null) { throw new IOException("DSpaceObject is null"); @@ -562,14 +574,14 @@ public boolean run(DSpaceObject dso) throws IOException { logInfo(logMessage(id)); visit(dso); return !suspend(statusCode); - } catch (IOException ioe) { + } catch (IOException | SQLException e) { //log error & pass exception upwards - System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe); - throw ioe; + System.out.println("Error executing curation task '" + task.getName() + "'; " + e); + throw e; } } - public boolean run(Context c, String id) throws IOException { + public boolean run(Context c, String id) throws IOException, SQLException { try { if (c == null || id == null) { throw new IOException("Context or identifier is null"); diff --git a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java index 05c7a8d99930..27a162d543c2 100644 --- a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java @@ -13,6 +13,8 @@ import java.util.ArrayList; import java.util.List; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; @@ -30,6 +32,7 @@ import org.dspace.workflow.FlowStep; import org.dspace.workflow.Task; import org.dspace.workflow.TaskSet; +import org.dspace.xmlworkflow.Role; import org.dspace.xmlworkflow.RoleMembers; import org.dspace.xmlworkflow.WorkflowConfigurationException; import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; @@ -47,14 +50,17 @@ * Manage interactions between curation and workflow. A curation task can be * attached to a workflow step, to be executed during the step. * + *

      + * NOTE: when run in workflow, curation tasks run with + * authorization disabled. + * * @see CurationTaskConfig * @author mwood */ @Service public class XmlWorkflowCuratorServiceImpl implements XmlWorkflowCuratorService { - private static final Logger LOG - = org.apache.logging.log4j.LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); @Autowired(required = true) protected XmlWorkflowFactory workflowFactory; @@ -97,7 +103,18 @@ public boolean doCuration(Context c, XmlWorkflowItem wfi) throws AuthorizeException, IOException, SQLException { Curator curator = new Curator(); curator.setReporter(reporter); - return curate(curator, c, wfi); + c.turnOffAuthorisationSystem(); + boolean wasAnonymous = false; + if (null == c.getCurrentUser()) { // We need someone to email + wasAnonymous = true; + c.setCurrentUser(ePersonService.getSystemEPerson(c)); + } + boolean failedP = curate(curator, c, wfi); + if (wasAnonymous) { + c.setCurrentUser(null); + } + c.restoreAuthSystemState(); + return failedP; } @Override @@ -123,40 +140,48 @@ public boolean curate(Curator curator, Context c, XmlWorkflowItem wfi) item.setOwningCollection(wfi.getCollection()); for (Task task : step.tasks) { curator.addTask(task.name); - curator.curate(item); - int status = curator.getStatus(task.name); - String result = curator.getResult(task.name); - String action = "none"; - switch (status) { - case Curator.CURATE_FAIL: - // task failed - notify any contacts the task has assigned - if (task.powers.contains("reject")) { - action = "reject"; - } - notifyContacts(c, wfi, task, "fail", action, result); - // if task so empowered, reject submission and terminate - if ("reject".equals(action)) { - workflowService.sendWorkflowItemBackSubmission(c, wfi, - c.getCurrentUser(), null, - task.name + ": " + result); - return false; - } - break; - case Curator.CURATE_SUCCESS: - if (task.powers.contains("approve")) { - action = "approve"; - } - notifyContacts(c, wfi, task, "success", 
action, result); - if ("approve".equals(action)) { - // cease further task processing and advance submission - return true; - } - break; - case Curator.CURATE_ERROR: - notifyContacts(c, wfi, task, "error", action, result); - break; - default: - break; + + // Check whether the task is configured to be queued rather than automatically run + if (StringUtils.isNotEmpty(step.queue)) { + // queue attribute has been set in the FlowStep configuration: add task to configured queue + curator.queue(c, item.getID().toString(), step.queue); + } else { + // Task is configured to be run automatically + curator.curate(c, item); + int status = curator.getStatus(task.name); + String result = curator.getResult(task.name); + String action = "none"; + switch (status) { + case Curator.CURATE_FAIL: + // task failed - notify any contacts the task has assigned + if (task.powers.contains("reject")) { + action = "reject"; + } + notifyContacts(c, wfi, task, "fail", action, result); + // if task so empowered, reject submission and terminate + if ("reject".equals(action)) { + workflowService.sendWorkflowItemBackSubmission(c, wfi, + c.getCurrentUser(), null, + task.name + ": " + result); + return false; + } + break; + case Curator.CURATE_SUCCESS: + if (task.powers.contains("approve")) { + action = "approve"; + } + notifyContacts(c, wfi, task, "success", action, result); + if ("approve".equals(action)) { + // cease further task processing and advance submission + return true; + } + break; + case Curator.CURATE_ERROR: + notifyContacts(c, wfi, task, "error", action, result); + break; + default: + break; + } } curator.clear(); } @@ -223,8 +248,12 @@ protected void notifyContacts(Context c, XmlWorkflowItem wfi, String status, String action, String message) throws AuthorizeException, IOException, SQLException { List epa = resolveContacts(c, task.getContacts(status), wfi); - if (epa.size() > 0) { + if (!epa.isEmpty()) { workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message); + } else 
{ + LOG.warn("No contacts were found for workflow item {}: " + + "task {} returned action {} with message {}", + wfi.getID(), task.name, action, message); } } @@ -247,8 +276,7 @@ protected List resolveContacts(Context c, List contacts, // decode contacts if ("$flowgroup".equals(contact)) { // special literal for current flowgoup - ClaimedTask claimedTask = claimedTaskService.findByWorkflowIdAndEPerson(c, wfi, c.getCurrentUser()); - String stepID = claimedTask.getStepID(); + String stepID = getFlowStep(c, wfi).step; Step step; try { Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); @@ -258,19 +286,26 @@ protected List resolveContacts(Context c, List contacts, String.valueOf(wfi.getID()), e); return epList; } - RoleMembers roleMembers = step.getRole().getMembers(c, wfi); - for (EPerson ep : roleMembers.getEPersons()) { - epList.add(ep); - } - for (Group group : roleMembers.getGroups()) { - epList.addAll(group.getMembers()); + Role role = step.getRole(); + if (null != role) { + RoleMembers roleMembers = role.getMembers(c, wfi); + for (EPerson ep : roleMembers.getEPersons()) { + epList.add(ep); + } + for (Group group : roleMembers.getGroups()) { + epList.addAll(group.getMembers()); + } + } else { + epList.add(ePersonService.getSystemEPerson(c)); } } else if ("$colladmin".equals(contact)) { + // special literal for collection administrators Group adGroup = wfi.getCollection().getAdministrators(); if (adGroup != null) { epList.addAll(groupService.allMembers(c, adGroup)); } } else if ("$siteadmin".equals(contact)) { + // special literal for site administrator EPerson siteEp = ePersonService.findByEmail(c, configurationService.getProperty("mail.admin")); if (siteEp != null) { diff --git a/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java b/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java index 2ad1eac12904..778b779cfe03 100644 --- 
a/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java +++ b/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java @@ -42,9 +42,9 @@ public boolean needsCuration(Context c, XmlWorkflowItem wfi) * * @param c the context * @param wfi the workflow item - * @return true if curation was completed or not required, + * @return true if curation was completed or not required; * false if tasks were queued for later completion, - * or item was rejected + * or item was rejected. * @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error @@ -58,7 +58,9 @@ public boolean doCuration(Context c, XmlWorkflowItem wfi) * @param curator the curation context * @param c the user context * @param wfId the workflow item's ID - * @return true if curation failed. + * @return true if curation curation was completed or not required; + * false if tasks were queued for later completion, + * or item was rejected. * @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error @@ -72,7 +74,9 @@ public boolean curate(Curator curator, Context c, String wfId) * @param curator the curation context * @param c the user context * @param wfi the workflow item - * @return true if curation failed. + * @return true if workflow curation was completed or not required; + * false if tasks were queued for later completion, + * or item was rejected. 
* @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error diff --git a/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java b/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java index ab36dc46b4c1..b2826998cccc 100644 --- a/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java +++ b/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java @@ -15,6 +15,7 @@ import org.dspace.deduplication.Deduplication; public interface DeduplicationService { + /** * Create a new Deduplication object * @@ -23,7 +24,7 @@ public interface DeduplicationService { * @throws SQLException An exception that provides information on a database * access error or other errors. */ - public Deduplication create(Context context, Deduplication dedup) throws SQLException; + Deduplication create(Context context, Deduplication dedup) throws SQLException; /*** * Return all deduplication objects @@ -35,7 +36,7 @@ public interface DeduplicationService { * @throws SQLException An exception that provides information on a database * access error or other errors. */ - public List findAll(Context context, int pageSize, int offset) throws SQLException; + List findAll(Context context, int pageSize, int offset) throws SQLException; /** * Count all accounts. @@ -55,11 +56,11 @@ public interface DeduplicationService { * @throws SQLException An exception that provides information on a database * access error or other errors. 
*/ - public void update(Context context, Deduplication dedup) throws SQLException; + void update(Context context, Deduplication dedup) throws SQLException; - public List getDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) + List getDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) throws SQLException; - public Deduplication uniqueDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) + Deduplication uniqueDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java index ee220e5a4fdf..21468def6866 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java +++ b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java @@ -76,14 +76,19 @@ private void buildFullTextList(Item parentItem) { if (StringUtils.equals(FULLTEXT_BUNDLE, myBundle.getName())) { // a-ha! grab the text out of the bitstreams List bitstreams = myBundle.getBitstreams(); + log.debug("Processing full-text bitstreams. 
Item handle: " + sourceInfo); for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) { fullTextStreams.add(new FullTextBitstream(sourceInfo, fulltextBitstream)); - log.debug("Added BitStream: " - + fulltextBitstream.getStoreNumber() + " " - + fulltextBitstream.getSequenceID() + " " - + fulltextBitstream.getName()); + if (fulltextBitstream != null) { + log.debug("Added BitStream: " + + fulltextBitstream.getStoreNumber() + " " + + fulltextBitstream.getSequenceID() + " " + + fulltextBitstream.getName()); + } else { + log.error("Found a NULL bitstream when processing full-text files: item handle:" + sourceInfo); + } } } } @@ -158,16 +163,16 @@ public FullTextBitstream(final String parentHandle, final Bitstream file) { } public String getContentType(final Context context) throws SQLException { - BitstreamFormat format = bitstream.getFormat(context); + BitstreamFormat format = bitstream != null ? bitstream.getFormat(context) : null; return format == null ? null : StringUtils.trimToEmpty(format.getMIMEType()); } public String getFileName() { - return StringUtils.trimToEmpty(bitstream.getName()); + return bitstream != null ? StringUtils.trimToEmpty(bitstream.getName()) : null; } public long getSize() { - return bitstream.getSizeBytes(); + return bitstream != null ? 
bitstream.getSizeBytes() : -1; } public InputStream getInputStream() throws SQLException, IOException, AuthorizeException { diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java index edc8b942139c..a4ed68dbc808 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java @@ -59,37 +59,18 @@ public void internalRun() throws Exception { * new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer"); */ - if (indexClientOptions == IndexClientOptions.REMOVE) { - handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); - indexer.unIndexContent(context, commandLine.getOptionValue("r")); - } else if (indexClientOptions == IndexClientOptions.CLEAN) { - handler.logInfo("Cleaning Index"); - indexer.cleanIndex(); - } else if (indexClientOptions == IndexClientOptions.DELETE) { - handler.logInfo("Deleting Index"); - indexer.deleteIndex(); - } else if (indexClientOptions == IndexClientOptions.BUILD || - indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - handler.logInfo("(Re)building index from scratch."); - indexer.deleteIndex(); - indexer.createIndex(context); - if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } - } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { - handler.logInfo("Optimizing search core."); - indexer.optimize(); - } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } else if (indexClientOptions == IndexClientOptions.INDEX) { - final String param = commandLine.getOptionValue('i'); + Optional indexableObject = Optional.empty(); + + if (indexClientOptions == IndexClientOptions.REMOVE || indexClientOptions == IndexClientOptions.INDEX) { + final String param = indexClientOptions == 
IndexClientOptions.REMOVE ? commandLine.getOptionValue('r') : + commandLine.getOptionValue('i'); UUID uuid = null; try { uuid = UUID.fromString(param); } catch (Exception e) { - // nothing to do, it should be an handle + // nothing to do, it should be a handle } - Optional indexableObject = Optional.empty(); + if (uuid != null) { final Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid); if (item != null) { @@ -121,7 +102,32 @@ public void internalRun() throws Exception { if (!indexableObject.isPresent()) { throw new IllegalArgumentException("Cannot resolve " + param + " to a DSpace object"); } - handler.logInfo("Indexing " + param + " force " + commandLine.hasOption("f")); + } + + if (indexClientOptions == IndexClientOptions.REMOVE) { + handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); + indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID()); + } else if (indexClientOptions == IndexClientOptions.CLEAN) { + handler.logInfo("Cleaning Index"); + indexer.cleanIndex(); + } else if (indexClientOptions == IndexClientOptions.DELETE) { + handler.logInfo("Deleting Index"); + indexer.deleteIndex(); + } else if (indexClientOptions == IndexClientOptions.BUILD || + indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + handler.logInfo("(Re)building index from scratch."); + indexer.deleteIndex(); + indexer.createIndex(context); + if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } + } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { + handler.logInfo("Optimizing search core."); + indexer.optimize(); + } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } else if (indexClientOptions == IndexClientOptions.INDEX) { + handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f")); final long startTimeMillis = 
System.currentTimeMillis(); final long count = indexAll(indexer, ContentServiceFactory.getInstance(). getItemService(), context, indexableObject.get()); @@ -185,7 +191,7 @@ private static long indexAll(final IndexingService indexingService, indexingService.indexContent(context, dso, true, true); count++; if (dso.getIndexedObject() instanceof Community) { - final Community community = (Community) dso; + final Community community = (Community) dso.getIndexedObject(); final String communityHandle = community.getHandle(); for (final Community subcommunity : community.getSubcommunities()) { count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity)); diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java index 8bf3cf2aba62..8707b733a637 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.discovery; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link IndexClient} script */ public class IndexDiscoveryScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -30,15 +22,6 @@ public Class getDspaceRunnableClass() { return dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the 
current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index 702c3ff4a2aa..c8ad15281a32 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -154,7 +154,11 @@ public void consume(Context ctx, Event event) throws Exception { case Event.REMOVE: case Event.ADD: - if (object == null) { + // At this time, ADD and REMOVE actions are ignored on SITE object. They are only triggered for + // top-level communities. No action is necessary as Community itself is indexed (or deleted) separately. + if (event.getSubjectType() == Constants.SITE) { + log.debug(event.getEventTypeAsString() + " event triggered for Site object. Skipping it."); + } else if (object == null) { log.warn(event.getEventTypeAsString() + " event, could not get object for " + event.getObjectTypeAsString() + " id=" + event.getObjectID() @@ -201,6 +205,10 @@ public void consume(Context ctx, Event event) throws Exception { @Override public void end(Context ctx) throws Exception { + // Change the mode to readonly to improve performance + Context.Mode originalMode = ctx.getCurrentMode(); + ctx.setMode(Context.Mode.READ_ONLY); + try { for (String uid : uniqueIdsToDelete) { try { @@ -230,6 +238,8 @@ public void end(Context ctx) throws Exception { uniqueIdsToDelete.clear(); createdItemsToUpdate.clear(); } + + ctx.setMode(originalMode); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java index 8dd02f5d44e0..aa90ccf4a371 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java @@ -107,6 +107,10 @@ static List 
findDirectlyAuthorizedGroupAndEPersonPrefixedIds( ArrayList prefixedIds = new ArrayList<>(); for (int auth : authorizations) { for (ResourcePolicy policy : authService.getPoliciesActionFilter(context, obj, auth)) { + // Avoid NPE in cases where the policy does not have group or eperson + if (policy.getGroup() == null && policy.getEPerson() == null) { + continue; + } String prefixedId = policy.getGroup() == null ? "e" + policy.getEPerson().getID() : "g" + policy.getGroup().getID(); diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index d86116a9dcd6..a40b32b5fb8c 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -22,6 +22,9 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.utils.DiscoverQueryBuilder; @@ -77,35 +80,80 @@ public static void clearCachedSearchService() { searchService = null; } + /** + * Retrieves the Discovery Configuration for a null context, prefix and DSpace object. + * This will result in returning the default configuration + * @return the default configuration + */ public static DiscoveryConfiguration getDiscoveryConfiguration() { - return getDiscoveryConfiguration(null, null); + return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { - return getDiscoveryConfiguration(null, dso); + /** + * Retrieves the Discovery Configuration with a null prefix for a DSpace object. 
+ * @param context + * the dabase context + * @param dso + * the DSpace object + * @return the Discovery Configuration for the specified DSpace object + */ + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) { + return getDiscoveryConfiguration(context, null, dso); } /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow - * + * + * + * @param context + * the database context * @param prefix * the namespace of the configuration to lookup if any * @param dso * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix, + DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); } else { - return getDiscoveryConfigurationByName(dso != null ? 
dso.getHandle() : null); + return getDiscoveryConfigurationByDSO(context, dso); } } + /** + * Retrieve the configuration for the current dspace object and all its parents and add it to the provided set + * @param context - The database context + * @param configurations - The set of configurations to add the retrieved configurations to + * @param prefix - The namespace of the configuration to lookup if any + * @param dso - The DSpace Object + * @return the set of configurations with additional retrieved ones for the dspace object and parents + * @throws SQLException + */ + public static Set addDiscoveryConfigurationForParents( + Context context, Set configurations, String prefix, DSpaceObject dso) + throws SQLException { + if (dso == null) { + configurations.add(getDiscoveryConfigurationByName(null)); + return configurations; + } + if (prefix != null) { + configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle())); + } else { + configurations.add(getDiscoveryConfigurationByName(dso.getHandle())); + } + + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() + .getDSpaceObjectService(dso); + DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso); + return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject); + } + /** * Return the discovery configuration identified by the specified name - * + * * @param configurationName the configuration name assigned to the bean in the * discovery.xml * @return the discovery configuration @@ -117,6 +165,18 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName( return configurationService.getDiscoveryConfigurationByNameOrDefault(configurationName); } + /** + * Return the discovery configuration for the provided DSO + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO + */ + public static 
DiscoveryConfiguration getDiscoveryConfigurationByDSO( + Context context, DSpaceObject dso) { + DiscoveryConfigurationService configurationService = getConfigurationService(); + return configurationService.getDiscoveryDSOConfiguration(context, dso); + } + public static DiscoveryConfigurationService getConfigurationService() { ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); return manager @@ -131,47 +191,55 @@ public static List getIgnoredMetadataFields(int type) { * Method that retrieves a list of all the configuration objects from the given item * A configuration object can be returned for each parent community/collection * + * @param context the database context * @param item the DSpace item * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public static List getAllDiscoveryConfigurations(Item item) throws SQLException { + public static List getAllDiscoveryConfigurations(Context context, Item item) + throws SQLException { List collections = item.getCollections(); - return getAllDiscoveryConfigurations(null, collections, item); + return getAllDiscoveryConfigurations(context, null, collections, item); } /** * Return all the discovery configuration applicable to the provided workspace item + * + * @param context * @param witem a workspace item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkspaceItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem()); } /** * Return all the discovery configuration applicable to the 
provided workflow item + * + * @param context * @param witem a workflow item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkflowItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem()); } - private static List getAllDiscoveryConfigurations(String prefix, + private static List getAllDiscoveryConfigurations(final Context context, + String prefix, List collections, Item item) throws SQLException { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); - result.add(configuration); + addDiscoveryConfigurationForParents(context, result, prefix, collection); } //Add alwaysIndex configurations diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java index 39130e9224d2..a1830a3931c7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java @@ -68,7 +68,7 @@ protected void addIndexValueForPersonItem(Item item, SolrInputDocument document) String lastName = getMetadataValue(item, LASTNAME_FIELD); List fullNames = getMetadataValues(item, FULLNAME_FIELDS); - getAllNameVariants(firstName, lastName, fullNames) + getAllNameVariants(firstName, lastName, fullNames, item.getID().toString()) .forEach(variant -> addIndexValue(document, variant)); } diff --git 
a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index ab56e4692e39..a2c3056ae38d 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -7,16 +7,31 @@ */ package org.dspace.discovery; +import java.sql.SQLException; +import java.util.Collection; import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import java.util.stream.Stream; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.apache.commons.lang3.StringUtils; import org.apache.solr.common.SolrInputDocument; import org.dspace.content.Bitstream; +import org.dspace.content.BitstreamFormat; import org.dspace.content.Bundle; -import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; import org.dspace.core.Context; import org.dspace.discovery.indexobject.IndexableItem; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

      @@ -36,41 +51,272 @@ * * * @author Martin Walk + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * */ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { - private static final Logger log = LogManager.getLogger(SolrServiceFileInfoPlugin.class); + /** + * Class used to map a target metadata into a solr index using {@code SolrInputDocument} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + * @param + */ + private static class SolrFieldMetadataMapper { + private final String solrField; + private final BiFunction> fieldAdder; + + public SolrFieldMetadataMapper( + String metadata, + BiFunction> fieldAdder + ) { + super(); + this.solrField = metadata; + this.fieldAdder = fieldAdder; + } + + public void map(SolrInputDocument document, T value) { + this.fieldAdder.apply(document, this.solrField).accept(value); + } + + } + + private static final Logger logger = LoggerFactory.getLogger(SolrServiceFileInfoPlugin.class); + + private static final DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd"); private static final String BUNDLE_NAME = "ORIGINAL"; private static final String SOLR_FIELD_NAME_FOR_FILENAMES = "original_bundle_filenames"; private static final String SOLR_FIELD_NAME_FOR_DESCRIPTIONS = "original_bundle_descriptions"; + private static final String SOLR_FIELD_NAME_FOR_OAIRE_LICENSE_CONDITION = "original_bundle_oaire_licenseCondition"; + private static final String SOLR_FIELD_NAME_FOR_DATACITE_RIGHTS = "original_bundle_datacite_rights"; + private static final String SOLR_FIELD_NAME_FOR_DATACITE_AVAILABLE = "original_bundle_datacite_available"; + private static final String SOLR_FIELD_NAME_FOR_MIMETYPE = "original_bundle_mime_type"; + private static final String SOLR_FIELD_NAME_FOR_CHECKSUM = "original_bundle_checksum"; + private static final String SOLR_FIELD_NAME_FOR_SIZEBYTES = "original_bundle_sizebytes"; + private static final String 
SOLR_FIELD_NAME_FOR_SHORT_DESCRIPTION = "original_bundle_short_description"; + private static final String SOLR_POSTFIX_FILTER = "_filter"; + private static final String SOLR_POSTFIX_KEYWORD = "_keyword"; + private static final String BITSTREAM_METADATA_SOLR_PREFIX_KEYWORD = "bitstreams."; + // used for facets and filters of type Date to correctly search them and visualize in facets. + private static final String SOLR_POSTFIX_YEAR = ".year"; + private static final MetadataFieldName METADATA_DATACITE_RIGHTS = new MetadataFieldName("datacite", "rights"); + private static final MetadataFieldName METADATA_DATACITE_AVAILABLE = new MetadataFieldName("datacite", "available"); + private static final MetadataFieldName METADATA_LICENSE_CONDITION = + new MetadataFieldName("oaire", "licenseCondition"); + + private static final BiFunction> defaultSolrIndexAdder = + (document, fieldName) -> value -> { + Collection fieldValues = document.getFieldValues(fieldName); + if (fieldValues == null || !fieldValues.contains(value)) { + addField(document, fieldName, value); + addField(document, fieldName.concat(SOLR_POSTFIX_KEYWORD), value); + addField(document, fieldName.concat(SOLR_POSTFIX_FILTER), value); + } + }; + + private static final BiFunction> simpleSolrIndexAdder = + (document, fieldName) -> value -> { + Collection fieldValues = document.getFieldValues(fieldName); + if (fieldValues == null || !fieldValues.contains(value)) { + addField(document, fieldName, value); + } + }; + + private static final BiFunction> bitstreamMetadataSolrIndexAdder = + (document, fieldName) -> value -> { + String baseIndex = BITSTREAM_METADATA_SOLR_PREFIX_KEYWORD.concat(fieldName); + Collection fieldValues = document.getFieldValues(baseIndex); + if (fieldValues == null || !fieldValues.contains(value)) { + addField(document, baseIndex, value); + addField(document, baseIndex.concat(SOLR_POSTFIX_KEYWORD), value); + addField(document, baseIndex.concat(SOLR_POSTFIX_FILTER), value); + } + }; + + private static 
final BiFunction> yearSolrIndexAdder = + (document, fieldName) -> value -> { + Collection fieldValues = document.getFieldValues(fieldName); + if (fieldValues == null || !fieldValues.contains(value)) { + addField(document, fieldName, value); + addField(document, fieldName.concat(SOLR_POSTFIX_KEYWORD), value); + addField(document, fieldName.concat(SOLR_POSTFIX_FILTER), value); + addField(document, fieldName.concat(SOLR_POSTFIX_YEAR), dtf.parseLocalDate(value).getYear()); + } + }; + + private static final SolrFieldMetadataMapper getFieldMapper( + String solrField, + BiFunction> adder + ) { + return new SolrFieldMetadataMapper(solrField, adder); + } + + private static final SolrFieldMetadataMapper OAIRE_LICENSE_MAPPER = + new SolrFieldMetadataMapper( + SOLR_FIELD_NAME_FOR_OAIRE_LICENSE_CONDITION, + defaultSolrIndexAdder + ); + + private static final SolrFieldMetadataMapper DATACITE_RIGHTS_MAPPER = + new SolrFieldMetadataMapper( + SOLR_FIELD_NAME_FOR_DATACITE_RIGHTS, + defaultSolrIndexAdder + ); + + private static final SolrFieldMetadataMapper DATACITE_AVAILABLE_MAPPER = + new SolrFieldMetadataMapper( + SOLR_FIELD_NAME_FOR_DATACITE_AVAILABLE, + yearSolrIndexAdder + ); + + private static final Map> mappableMetadatas = + Stream.of( + Map.entry(METADATA_LICENSE_CONDITION.toString(), OAIRE_LICENSE_MAPPER), + Map.entry(METADATA_DATACITE_RIGHTS.toString(), DATACITE_RIGHTS_MAPPER), + Map.entry(METADATA_DATACITE_AVAILABLE.toString(), DATACITE_AVAILABLE_MAPPER) + ) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + + private static void addField(SolrInputDocument document, String name, Object value) { + document.addField(name, value); + } @Override public void additionalIndex(Context context, IndexableObject indexableObject, SolrInputDocument document) { if (indexableObject instanceof IndexableItem) { - Item item = ((IndexableItem) indexableObject).getIndexedObject(); - List bundles = item.getBundles(); - if (bundles != null) { - for (Bundle bundle : 
bundles) { - String bundleName = bundle.getName(); - if ((bundleName != null) && bundleName.equals(BUNDLE_NAME)) { - List bitstreams = bundle.getBitstreams(); - if (bitstreams != null) { - for (Bitstream bitstream : bitstreams) { - try { - document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); - - String description = bitstream.getDescription(); - if ((description != null) && !description.isEmpty()) { - document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); - } - } catch (Exception e) { - log.warn("Error occurred during update index for item {}", item.getID()); - } - } - } - } + generateBundleIndex(context, document, ((IndexableItem) indexableObject).getIndexedObject().getBundles()); + } + } + + private void generateBundleIndex(Context context, SolrInputDocument document, List bundles) { + if (bundles != null) { + for (Bundle bundle : bundles) { + String bundleName = bundle.getName(); + if (bundleName != null && bundleName.equals(BUNDLE_NAME)) { + generateBitstreamIndex(context, document, bundle.getBitstreams()); } } } } -} \ No newline at end of file + + /** + * Method that adds index to {@link SolrInputDocument}, iterates between {@code bitstreams} and {@code mappableMetadatas} + * then applies the corresponding mapping function to the bitstream + * + * @param document solr document + * @param bitstreams list of bitstreams to analyze + */ + private void generateBitstreamIndex(Context context, SolrInputDocument document, List bitstreams) { + if (document != null && bitstreams != null) { + for (Bitstream bitstream : bitstreams) { + + indexBitstreamFields(context, document, bitstream); + + indexBitstreamsMetadatadas(document, bitstream); + } + } + } + + private void indexBitstreamFields(Context context, SolrInputDocument document, Bitstream bitstream) { + addAndHandleException( + simpleSolrIndexAdder, document, bitstream, SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName() + ); + + Optional.ofNullable(bitstream.getDescription()) + 
.filter(StringUtils::isNotEmpty) + .ifPresent( + (description) -> + addAndHandleException( + simpleSolrIndexAdder, document, bitstream, SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description + ) + ); + + try { + Optional formatOptional = + Optional.ofNullable(bitstream.getFormat(context)) + .filter(Objects::nonNull); + + formatOptional + .map(BitstreamFormat::getMIMEType) + .filter(StringUtils::isNotBlank) + .ifPresent(format -> + addAndHandleException( + defaultSolrIndexAdder, document, bitstream, SOLR_FIELD_NAME_FOR_MIMETYPE, format + ) + ); + + formatOptional + .map(BitstreamFormat::getShortDescription) + .ifPresent(format -> + addAndHandleException( + simpleSolrIndexAdder, document, bitstream, SOLR_FIELD_NAME_FOR_SHORT_DESCRIPTION, format + ) + ); + } catch (SQLException e) { + logger.error("Error while retrievig bitstream format", e); + throw new RuntimeException("Error while retrievig bitstream format", e); + } + + Optional.ofNullable(bitstream.getChecksum()) + .filter(StringUtils::isNotBlank) + .map(checksum -> bitstream.getChecksumAlgorithm() + ":" + bitstream.getChecksum()) + .ifPresent(checksum -> + addAndHandleException( + defaultSolrIndexAdder, document, bitstream, SOLR_FIELD_NAME_FOR_CHECKSUM, checksum + ) + ); + + Optional.ofNullable(bitstream.getSizeBytes()) + .filter(l -> l > 0) + .map(String::valueOf) + .ifPresent(size -> + addAndHandleException( + simpleSolrIndexAdder, document, bitstream, SOLR_FIELD_NAME_FOR_SIZEBYTES, size + ) + ); + } + + protected void addAndHandleException( + BiFunction> solrIndexAdder, + SolrInputDocument document, Bitstream bitstream, + String field, String value + ) { + try { + solrIndexAdder.apply(document, field).accept(value); + } catch (Exception e) { + logger.warn( + "Error occurred during the update of index field {} for bitstream {}", + field, + bitstream.getID() + ); + } + } + + private void indexBitstreamsMetadatadas(SolrInputDocument document, Bitstream bitstream) { + bitstream + .getMetadata() + .stream() + 
.filter(metadata -> metadata != null && StringUtils.isNotBlank(metadata.getValue())) + .forEach(metadata -> { + MetadataField metadataField = metadata.getMetadataField(); + String bitstreamMetadata = metadataField.toString('.'); + Optional.ofNullable(mappableMetadatas.get(bitstreamMetadata)) + .filter(Objects::nonNull) + .orElse( + getFieldMapper( + metadataField.toString(), + bitstreamMetadataSolrIndexAdder + ) + ) + .map(document, metadata.getValue()); + }); + } + + private boolean areEquals(MetadataFieldName metadataFieldName, MetadataValue metadata) { + return StringUtils.equals(metadataFieldName.schema, metadata.getSchema()) && + StringUtils.equals(metadataFieldName.element, metadata.getElement()) && + StringUtils.equals(metadataFieldName.qualifier, metadata.getQualifier()); + } +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index bcc169a6df9e..01a641778be3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -272,7 +272,12 @@ public void unIndexContent(Context context, String searchUniqueID, boolean commi try { if (solrSearchCore.getSolr() != null) { - indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID).delete(searchUniqueID); + IndexFactory index = indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID); + if (index != null) { + index.delete(searchUniqueID); + } else { + log.warn("Object not found in Solr index: " + searchUniqueID); + } if (commit) { solrSearchCore.getSolr().commit(); } @@ -1055,9 +1060,8 @@ protected DiscoverResult retrieveResult(Context context, DiscoverQuery query) //Add information about our search fields for (String field : searchFields) { List valuesAsString = new ArrayList<>(); - for (Object o : doc.getFieldValues(field)) { - valuesAsString.add(String.valueOf(o)); - } + 
Optional.ofNullable(doc.getFieldValues(field)) + .ifPresent(l -> l.forEach(o -> valuesAsString.add(String.valueOf(o)))); resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()])); } result.addSearchDocument(indexableObject, resultDoc); diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java index 213d6547d958..606044bea613 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java @@ -36,6 +36,7 @@ import org.dspace.discovery.configuration.MultiLanguageDiscoverSearchFilterFacet; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.services.ConfigurationService; +import org.dspace.web.ContextUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -160,7 +161,7 @@ private List findSearchFiltersByMetadataField(Item item, private List getAllDiscoveryConfiguration(Item item) { try { - return SearchUtils.getAllDiscoveryConfigurations(item); + return SearchUtils.getAllDiscoveryConfigurations(ContextUtil.obtainCurrentRequestContext(), item); } catch (SQLException e) { throw new SQLRuntimeException(e); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 8e9c1a77aeb5..796f1c43f4d4 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -7,12 +7,23 @@ */ package org.dspace.discovery.configuration; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import 
java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -22,9 +33,18 @@ */ public class DiscoveryConfigurationService { + private static final Logger log = LogManager.getLogger(); + private Map map; private Map> toIgnoreMetadataFields = new HashMap<>(); + /** + * Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its + * own configuration, we take the one of the first parent that does. + * This cache ensures we do not have to go up the hierarchy every time. + */ + private final Map comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>(); + public Map getMap() { return map; } @@ -51,24 +71,98 @@ public void setToIgnoreMetadataFields(Map> toIgnoreMetadat this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { + /** + * Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from + * the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will + * be done to look for the unique index ID of the IndexableObject. 
When the IndexableObject is null, the default + * configuration will be retrieved + * + * When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param indexableObject - The IndexableObject to retrieve the configuration for + * @return the discovery configuration for the provided IndexableObject. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) { String name; - if (dso == null) { - name = "default"; - } else if (dso instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); + if (indexableObject == null) { + return getDiscoveryConfiguration(null); + } else if (indexableObject instanceof IndexableDSpaceObject) { + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject()); } else { - name = dso.getUniqueIndexID(); + name = indexableObject.getUniqueIndexID(); + } + return getDiscoveryConfiguration(name); + } + + /** + * Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO. 
+ */ + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { + // Fall back to default configuration + if (dso == null) { + return getDiscoveryConfiguration(null, true); } - return getDiscoveryConfigurationByNameOrDefault(name); + // Attempt to retrieve cached configuration by UUID + if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) { + return comColToDiscoveryConfigurationMap.get(dso.getID()); + } + + DiscoveryConfiguration configuration; + + // Attempt to retrieve configuration by DSO handle + configuration = getDiscoveryConfiguration(dso.getHandle(), false); + + if (configuration == null) { + // Recurse up the Comm/Coll hierarchy until a configuration is found + DSpaceObjectService dSpaceObjectService = + ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + configuration = getDiscoveryDSOConfiguration(context, parentObject); + } + + // Cache the resulting configuration when the DSO is a Community or Collection + if (dso instanceof Community || dso instanceof Collection) { + comColToDiscoveryConfigurationMap.put(dso.getID(), configuration); + } + + return configuration; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDefault(final String name) { + /** + * Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the + * default configuration will be returned. + * @param name - The name of the configuration to be retrieved + * @return the Discovery Configuration for the provided name, or default when none was found. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(String name) { + return getDiscoveryConfiguration(name, true); + } + + /** + * Retrieve the configuration for the provided name. 
When useDefault is set to true, the "default" configuration + * will be returned when no match is found. When useDefault is set to false, null will be returned when no match is + * found. + * @param name - The name of the configuration to retrieve + * @param useDefault - Whether the default configuration should be used when no match is found + * @return the configuration for the provided name + */ + public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { + DiscoveryConfiguration result; - DiscoveryConfiguration result = getDiscoveryConfigurationByName(name); + result = StringUtils.isBlank(name) ? null : getMap().get(name); - if (result == null) { + if (result == null && useDefault) { //No specific configuration, get the default one result = getMap().get("default"); } @@ -76,8 +170,33 @@ public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDefault(final Str return result; } + /** + * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be + * checked for the provided name. 
When no match is found for the name, the configuration will be retrieved for the + * IndexableObject + * + * @param context - The database context + * @param configurationName - The name of the configuration to be retrieved + * @param indexableObject - The indexable object to retrieve the configuration for + * @return the Discovery configuration for the provided name, or when not found for the provided IndexableObject + */ + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context, + String configurationName, + IndexableObject indexableObject) { + if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { + return getMap().get(configurationName); + } else { + return getDiscoveryConfiguration(context, indexableObject); + } + } + + + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDefault(final String name) { + return this.getDiscoveryConfiguration(name, true); + } + public DiscoveryConfiguration getDiscoveryConfigurationByName(String name) { - return StringUtils.isBlank(name) ? 
null : getMap().get(name); + return this.getDiscoveryConfiguration(name, false); } public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, @@ -85,7 +204,7 @@ public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(dso); + return getDiscoveryConfiguration(null, dso); } } @@ -105,6 +224,18 @@ public List getIndexAlwaysConfigurations() { return configs; } + /** + * @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet} + */ + public List getAllFacetsConfig() { + List configs = new ArrayList<>(); + for (String key : map.keySet()) { + DiscoveryConfiguration config = map.get(key); + configs.addAll(config.getSidebarFacets()); + } + return configs; + } + public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java index e2e83920eb70..3db9e04c2694 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java @@ -52,6 +52,7 @@ public Iterator findByRelation(Context context, Item item, String relation DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); discoverQuery.setDiscoveryConfigurationName(discoveryConfiguration.getId()); + discoverQuery.setScopeObject(new IndexableItem(item)); List 
defaultFilterQueries = discoveryConfiguration.getDefaultFilterQueries(); for (String defaultFilterQuery : defaultFilterQueries) { discoverQuery.addFilterQueries(MessageFormat.format(defaultFilterQuery, item.getID())); diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java index 3a730b34dd36..b05f65bca592 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java @@ -7,12 +7,11 @@ */ package org.dspace.discovery.configuration; -import static org.apache.commons.collections4.CollectionUtils.isEmpty; import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; -import static org.apache.commons.lang3.StringUtils.isBlank; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -26,6 +25,11 @@ public class DiscoverySortConfiguration { private List sortFields = new ArrayList(); + /** + * Default sort configuration to use when needed + */ + @Nullable private DiscoverySortFieldConfiguration defaultSortField; + public List getSortFields() { return sortFields; } @@ -34,6 +38,14 @@ public void setSortFields(List sortFields) { this.sortFields = sortFields; } + public DiscoverySortFieldConfiguration getDefaultSortField() { + return defaultSortField; + } + + public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) { + this.defaultSortField = configuration; + } + public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { if (StringUtils.isBlank(sortField)) { return null; @@ -56,15 +68,4 @@ public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortFiel public String getDefaultSortDirection() 
{ return isNotEmpty(getSortFields()) ? getSortFields().get(0).getDefaultSortOrder().name() : null; } - - public String getDefaultSortField() { - if (isEmpty(getSortFields())) { - return SCORE; - } - DiscoverySortFieldConfiguration defaultSort = getSortFields().get(0); - if (isBlank(defaultSort.getMetadataField())) { - return SCORE; - } - return defaultSort.getMetadataField(); - } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java index cc867bbf21e0..c4258efdeea5 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java @@ -86,7 +86,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCollection inde final Collection collection = indexableCollection.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields = new ArrayList<>(); @@ -173,4 +173,4 @@ public List getCollectionLocations(Context context, Collection collectio return locations; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java index 8521b7dda0de..e92819601839 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java @@ -69,7 +69,7 @@ public 
SolrInputDocument buildDocument(Context context, IndexableCommunity index final Community community = indexableObject.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields = new ArrayList<>(); @@ -135,4 +135,4 @@ public List getLocations(Context context, IndexableCommunity indexableDS return locations; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index 8a24b997ffae..f24e9875f006 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -80,11 +80,13 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc, // Add item metadata List discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + discoveryConfigurations = 
SearchUtils.getAllDiscoveryConfigurations(context, item); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 82247bb972d2..9959aa7cb746 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -31,6 +31,7 @@ import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; +import org.dspace.authority.service.AuthorityValueService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; @@ -93,6 +94,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace @@ -391,7 +394,11 @@ public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item Boolean.FALSE), true); - if (!ignorePrefered && hasChoiceAuthority) { + if ( + !ignorePrefered && + hasChoiceAuthority && + !authority.startsWith(AuthorityValueService.GENERATE) + ) { try { preferedLabel = choiceAuthorityService.getLabel(meta, Constants.ITEM, collection, meta.getLanguage()); @@ -948,7 +955,7 @@ private void indexIfFilterTypeFacet(SolrInputDocument doc, DiscoverySearchFilter private void saveFacetPrefixParts(SolrInputDocument doc, DiscoverySearchFilter searchFilter, String value, String separator, String authority, String 
preferedLabel) { value = StringUtils.normalizeSpace(value); - Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE); + Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CHARACTER_CLASS); Matcher matcher = pattern.matcher(value); while (matcher.find()) { int index = matcher.start(); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java index d98d4a154977..7b933780bfbc 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java @@ -68,6 +68,7 @@ public SolrInputDocument buildDocument(Context context, IndexableMetadataField i // add read permission on doc for anonymous group doc.addField("read", "g" + anonymousGroup.getID()); } + doc.addField(FIELD_NAME_VARIATIONS + "_sort", fieldName); return doc; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java index ca067452ab6c..6263ec152326 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java @@ -383,7 +383,9 @@ private boolean isConfigured(String sortBy, DiscoverySortConfiguration searchSor } private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { - if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + if (searchSortConfiguration.getDefaultSortField() != null) { + sortOrder = searchSortConfiguration.getDefaultSortField().getDefaultSortOrder().name(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && !searchSortConfiguration.getSortFields().isEmpty()) { 
sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); } @@ -393,7 +395,9 @@ private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConf private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) { String sortBy;// Attempt to find the default one, if none found we use SCORE sortBy = "score"; - if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + if (searchSortConfiguration.getDefaultSortField() != null) { + sortBy = searchSortConfiguration.getDefaultSortField().getMetadataField(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && !searchSortConfiguration.getSortFields().isEmpty()) { DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); if (StringUtils.isBlank(defaultSort.getMetadataField())) { diff --git a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java index 283f101f2ba5..8be6aac7e392 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java @@ -11,25 +11,36 @@ import java.sql.SQLException; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.UUID; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.stream.Stream; import javax.mail.MessagingException; import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authenticate.service.AuthenticationService; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.MetadataValueService; import 
org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; import org.dspace.core.Utils; +import org.dspace.eperson.dto.RegistrationDataPatch; import org.dspace.eperson.service.AccountService; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.log.LogMessage; /** * Methods for handling registration by email and forgotten passwords. When @@ -50,8 +61,16 @@ public class AccountServiceImpl implements AccountService { * log4j log */ private static final Logger log = LogManager.getLogger(AccountServiceImpl.class); + + private static final Map> allowedMergeArguments = + Map.of( + "email", + (RegistrationData registrationData, EPerson eperson) -> eperson.setEmail(registrationData.getEmail()) + ); + @Autowired(required = true) protected EPersonService ePersonService; + @Autowired(required = true) protected RegistrationDataService registrationDataService; @Autowired @@ -63,6 +82,9 @@ public class AccountServiceImpl implements AccountService { @Autowired private AuthenticationService authenticationService; + @Autowired + private MetadataValueService metadataValueService; + protected AccountServiceImpl() { } @@ -79,9 +101,9 @@ protected AccountServiceImpl() { * * @param context DSpace context * @param email Email address to send the registration email to - * @throws java.sql.SQLException passed through. - * @throws java.io.IOException passed through. - * @throws javax.mail.MessagingException passed through. + * @throws java.sql.SQLException passed through. + * @throws java.io.IOException passed through. + * @throws javax.mail.MessagingException passed through. * @throws org.dspace.authorize.AuthorizeException passed through. 
*/ @Override @@ -94,7 +116,7 @@ public void sendRegistrationInfo(Context context, String email, List group if (!authenticationService.canSelfRegister(context, null, email)) { throw new IllegalStateException("self registration is not allowed with this email address"); } - sendInfo(context, email, groups, true, true); + sendInfo(context, email, groups, RegistrationTypeEnum.REGISTER, true); } /** @@ -108,19 +130,36 @@ public void sendRegistrationInfo(Context context, String email, List group *
    • Authorization error (throws AuthorizeException).
    • * * - * * @param context DSpace context * @param email Email address to send the forgot-password email to - * @throws java.sql.SQLException passed through. - * @throws java.io.IOException passed through. - * @throws javax.mail.MessagingException passed through. + * @throws java.sql.SQLException passed through. + * @throws java.io.IOException passed through. + * @throws javax.mail.MessagingException passed through. * @throws org.dspace.authorize.AuthorizeException passed through. */ @Override public void sendForgotPasswordInfo(Context context, String email, List groups) - throws SQLException, IOException, MessagingException, - AuthorizeException { - sendInfo(context, email, groups, false, true); + throws SQLException, IOException, MessagingException, AuthorizeException { + sendInfo(context, email, groups, RegistrationTypeEnum.FORGOT, true); + } + + /** + * Checks if exists an account related to the token provided + * + * @param context DSpace context + * @param token Account token + * @return true if exists, false otherwise + * @throws SQLException + * @throws AuthorizeException + */ + @Override + public boolean existsAccountFor(Context context, String token) throws SQLException, AuthorizeException { + return getEPerson(context, token) != null; + } + + @Override + public boolean existsAccountWithEmail(Context context, String email) throws SQLException { + return ePersonService.findByEmail(context, email) != null; } /** @@ -137,8 +176,8 @@ public void sendForgotPasswordInfo(Context context, String email, List gro * @param context DSpace context * @param token Account token * @return The EPerson corresponding to token, or null. - * @throws SQLException If the token or eperson cannot be retrieved from the - * database. + * @throws SQLException If the token or eperson cannot be retrieved from the + * database. * @throws AuthorizeException passed through. 
*/ @Override @@ -192,6 +231,239 @@ public void deleteToken(Context context, String token) registrationDataService.deleteByToken(context, token); } + public EPerson mergeRegistration(Context context, UUID personId, String token, List overrides) + throws AuthorizeException, SQLException { + + RegistrationData registrationData = getRegistrationData(context, token); + EPerson eperson = null; + if (personId != null) { + eperson = ePersonService.findByIdOrLegacyId(context, personId.toString()); + } + + if (!canCreateUserBy(context, registrationData.getRegistrationType())) { + throw new AuthorizeException("Token type invalid for the current user."); + } + + if (hasLoggedEPerson(context) && !isSameContextEPerson(context, eperson)) { + throw new AuthorizeException("Only the user with id: " + personId + " can make this action."); + } + + context.turnOffAuthorisationSystem(); + + eperson = Optional.ofNullable(eperson).orElseGet(() -> createEPerson(context, registrationData)); + updateValuesFromRegistration(context, eperson, registrationData, overrides); + addEPersonToGroups(context, eperson, registrationData.getGroups()); + deleteToken(context, token); + ePersonService.update(context, eperson); + + context.commit(); + context.restoreAuthSystemState(); + + return eperson; + } + + private EPerson createEPerson(Context context, RegistrationData registrationData) { + EPerson eperson; + try { + eperson = ePersonService.create(context); + + eperson.setNetid(registrationData.getNetId()); + eperson.setEmail(registrationData.getEmail()); + + RegistrationDataMetadata firstName = + registrationDataService.getMetadataByMetadataString( + registrationData, + "eperson.firstname" + ); + if (firstName != null) { + eperson.setFirstName(context, firstName.getValue()); + } + + RegistrationDataMetadata lastName = + registrationDataService.getMetadataByMetadataString( + registrationData, + "eperson.lastname" + ); + if (lastName != null) { + eperson.setLastName(context, lastName.getValue()); + } + 
eperson.setCanLogIn(true); + eperson.setSelfRegistered(true); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException( + "Cannote create the eperson linked to the token: " + registrationData.getToken(), + e + ); + } + return eperson; + } + + private boolean hasLoggedEPerson(Context context) { + return context.getCurrentUser() != null; + } + + private boolean isSameContextEPerson(Context context, EPerson eperson) { + return eperson.equals(context.getCurrentUser()); + } + + + @Override + public RegistrationData renewRegistrationForEmail( + Context context, RegistrationDataPatch registrationDataPatch + ) throws AuthorizeException { + try { + RegistrationData newRegistration = registrationDataService.clone(context, registrationDataPatch); + registrationDataService.delete(context, registrationDataPatch.getOldRegistration()); + fillAndSendEmail(context, newRegistration); + return newRegistration; + } catch (SQLException | MessagingException | IOException e) { + log.error(e); + throw new RuntimeException(e); + } + } + + private boolean isEmailConfirmed(RegistrationData oldRegistration, String email) { + return email.equals(oldRegistration.getEmail()); + } + + @Override + public boolean isTokenValidForCreation(RegistrationData registrationData) { + return ( + isExternalRegistrationToken(registrationData.getRegistrationType()) || + isValidationToken(registrationData.getRegistrationType()) + ) && + StringUtils.isNotBlank(registrationData.getNetId()); + } + + private boolean canCreateUserBy(Context context, RegistrationTypeEnum registrationTypeEnum) { + return isValidationToken(registrationTypeEnum) || + canCreateUserFromExternalRegistrationToken(context, registrationTypeEnum); + } + + private static boolean canCreateUserFromExternalRegistrationToken( + Context context, RegistrationTypeEnum registrationTypeEnum + ) { + return context.getCurrentUser() != null && isExternalRegistrationToken(registrationTypeEnum); + } + + private static boolean 
isExternalRegistrationToken(RegistrationTypeEnum registrationTypeEnum) { + return RegistrationTypeEnum.ORCID.equals(registrationTypeEnum); + } + + private static boolean isValidationToken(RegistrationTypeEnum registrationTypeEnum) { + return RegistrationTypeEnum.VALIDATION_ORCID.equals(registrationTypeEnum); + } + + + protected void updateValuesFromRegistration( + Context context, EPerson eperson, RegistrationData registrationData, List overrides + ) { + Stream.concat( + getMergeActions(registrationData, overrides), + getUpdateActions(context, eperson, registrationData) + ).forEach(c -> c.accept(eperson)); + } + + private Stream> getMergeActions(RegistrationData registrationData, List overrides) { + if (overrides == null || overrides.isEmpty()) { + return Stream.empty(); + } + return overrides.stream().map(f -> mergeField(f, registrationData)); + } + + protected Stream> getUpdateActions( + Context context, EPerson eperson, RegistrationData registrationData + ) { + Stream.Builder> actions = Stream.builder(); + if (eperson.getNetid() == null) { + actions.add(p -> p.setNetid(registrationData.getNetId())); + } + if (eperson.getEmail() == null) { + actions.add(p -> p.setEmail(registrationData.getEmail())); + } + for (RegistrationDataMetadata metadatum : registrationData.getMetadata()) { + Optional> epersonMetadata = + Optional.ofNullable( + ePersonService.getMetadataByMetadataString( + eperson, metadatum.getMetadataField().toString('.') + ) + ).filter(l -> !l.isEmpty()); + if (epersonMetadata.isEmpty()) { + actions.add(p -> addMetadataValue(context, metadatum, p)); + } + } + return actions.build(); + } + + private List addMetadataValue(Context context, RegistrationDataMetadata metadatum, EPerson p) { + try { + return ePersonService.addMetadata( + context, p, metadatum.getMetadataField(), Item.ANY, List.of(metadatum.getValue()) + ); + } catch (SQLException e) { + throw new RuntimeException( + "Could not add metadata" + metadatum.getMetadataField() + " to eperson with 
uuid: " + p.getID(), e); + } + } + + protected Consumer mergeField(String field, RegistrationData registrationData) { + return person -> + allowedMergeArguments.getOrDefault( + field, + mergeRegistrationMetadata(field) + ).accept(registrationData, person); + } + + protected BiConsumer mergeRegistrationMetadata(String field) { + return (registrationData, person) -> { + RegistrationDataMetadata registrationMetadata = getMetadataOrThrow(registrationData, field); + MetadataValue metadata = getMetadataOrThrow(person, field); + metadata.setValue(registrationMetadata.getValue()); + ePersonService.setMetadataModified(person); + }; + } + + private RegistrationDataMetadata getMetadataOrThrow(RegistrationData registrationData, String field) { + return registrationDataService.getMetadataByMetadataString(registrationData, field); + } + + private MetadataValue getMetadataOrThrow(EPerson eperson, String field) { + return ePersonService.getMetadataByMetadataString(eperson, field).stream().findFirst() + .orElseThrow( + () -> new IllegalArgumentException( + "Could not find the metadata field: " + field + " for eperson: " + eperson.getID()) + ); + } + + + protected void addEPersonToGroups(Context context, EPerson eperson, List groups) { + if (CollectionUtils.isEmpty(groups)) { + return; + } + for (Group group : groups) { + groupService.addMember(context, group, eperson); + } + } + + private RegistrationData getRegistrationData(Context context, String token) + throws SQLException, AuthorizeException { + return Optional.ofNullable(registrationDataService.findByToken(context, token)) + .filter(rd -> + isValid(rd) || + !isValidationToken(rd.getRegistrationType()) + ) + .orElseThrow( + () -> new AuthorizeException( + "The registration token: " + token + " is not valid!" + ) + ); + } + + private boolean isValid(RegistrationData rd) { + return registrationDataService.isValid(rd); + } + + /** * THIS IS AN INTERNAL METHOD. THE SEND PARAMETER ALLOWS IT TO BE USED FOR * TESTING PURPOSES. 
@@ -204,8 +476,7 @@ public void deleteToken(Context context, String token) * * @param context DSpace context * @param email Email address to send the forgot-password email to - * @param isRegister If true, this is for registration; otherwise, it is - * for forgot-password + * @param type Type of registration {@link RegistrationTypeEnum} * @param send If true, send email; otherwise do not send any email * @return null if no EPerson with that email found * @throws SQLException Cannot create registration data in database @@ -213,16 +484,17 @@ public void deleteToken(Context context, String token) * @throws IOException Error reading email template * @throws AuthorizeException Authorization error */ - protected RegistrationData sendInfo(Context context, String email, List groups, - boolean isRegister, boolean send) throws SQLException, IOException, - MessagingException, AuthorizeException { + protected RegistrationData sendInfo( + Context context, String email, List groups, RegistrationTypeEnum type, boolean send + ) throws SQLException, IOException, MessagingException, AuthorizeException { // See if a registration token already exists for this user - RegistrationData rd = registrationDataService.findByEmail(context, email); - + RegistrationData rd = registrationDataService.findBy(context, email, type); + boolean isRegister = RegistrationTypeEnum.REGISTER.equals(type); // If it already exists, just re-issue it if (rd == null) { rd = registrationDataService.create(context); + rd.setRegistrationType(type); rd.setToken(Utils.generateHexKey()); // don't set expiration date any more @@ -250,7 +522,7 @@ protected RegistrationData sendInfo(Context context, String email, List gr } } if (send) { - sendEmail(context, email, isRegister, rd); + fillAndSendEmail(context, email, isRegister, rd); } return rd; @@ -271,22 +543,19 @@ protected RegistrationData sendInfo(Context context, String email, List gr * @throws IOException A general class of exceptions produced by failed or 
interrupted I/O operations. * @throws SQLException An exception that provides information on a database access error or other errors. */ - protected void sendEmail(Context context, String email, boolean isRegister, RegistrationData rd) + protected void fillAndSendEmail(Context context, String email, boolean isRegister, RegistrationData rd) throws MessagingException, IOException, SQLException { String base = configurationService.getProperty("dspace.ui.url"); // Note change from "key=" to "token=" - String specialLink = new StringBuffer().append(base).append( - base.endsWith("/") ? "" : "/").append( - isRegister ? "register" : (rd.getGroups().size() == 0) ? "forgot" : "invitation").append("/") - .append(rd.getToken()) - .toString(); + String specialLink = getSpecialLink( + base, rd, isRegister ? "register" : ((rd.getGroups().size() == 0) ? "forgot" : "invitation") + ); + Locale locale = context.getCurrentLocale(); - Email bean = Email.getEmail(I18nUtil.getEmailFilename(locale, isRegister ? "register" - : "change_password")); - bean.addRecipient(email); - bean.addArgument(specialLink); - bean.send(); + String emailFilename = I18nUtil.getEmailFilename(locale, isRegister ? "register" : "change_password"); + + fillAndSendEmail(email, emailFilename, specialLink); // Breadcrumbs if (log.isInfoEnabled()) { @@ -294,4 +563,38 @@ protected void sendEmail(Context context, String email, boolean isRegister, Regi + " information to " + email); } } + + private static String getSpecialLink(String base, RegistrationData rd, String subPath) { + return new StringBuffer(base) + .append(base.endsWith("/") ? 
"" : "/") + .append(subPath) + .append("/") + .append(rd.getToken()) + .toString(); + } + + protected void fillAndSendEmail( + Context context, RegistrationData rd + ) throws MessagingException, IOException { + String base = configurationService.getProperty("dspace.ui.url"); + + // Note change from "key=" to "token=" + String specialLink = getSpecialLink(base, rd, rd.getRegistrationType().getLink()); + + String emailFilename = I18nUtil.getEmailFilename( + context.getCurrentLocale(), rd.getRegistrationType().toString().toLowerCase() + ); + + fillAndSendEmail(rd.getEmail(), emailFilename, specialLink); + + log.info(LogMessage.of(() -> "Sent " + rd.getRegistrationType().getLink() + " link to " + rd.getEmail())); + } + + protected void fillAndSendEmail(String email, String emailFilename, String specialLink) + throws IOException, MessagingException { + Email bean = Email.getEmail(emailFilename); + bean.addRecipient(email); + bean.addArgument(specialLink); + bean.send(); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index cf62c805b12c..2188dcb81579 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -46,6 +46,7 @@ import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.services.ConfigurationService; import org.dspace.util.UUIDUtils; import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; @@ -101,6 +102,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme @Autowired(required = true) protected ClaimedTaskService claimedTaskService; @Autowired(required = true) + protected ConfigurationService configurationService; + @Autowired(required = true) protected MetadataSchemaService metadataSchemaService; @Autowired 
protected OrcidTokenService orcidTokenService; @@ -114,6 +117,30 @@ public EPerson find(Context context, UUID id) throws SQLException { return ePersonDAO.findByID(context, EPerson.class, id); } + /** + * Create a fake EPerson which can receive email. Its address will be the + * value of "mail.admin", or "postmaster" if all else fails. + * @param c + * @return + * @throws SQLException + */ + @Override + public EPerson getSystemEPerson(Context c) + throws SQLException { + String adminEmail = configurationService.getProperty("mail.admin"); + if (null == adminEmail) { + adminEmail = "postmaster"; // Last-ditch attempt to send *somewhere* + } + EPerson systemEPerson = findByEmail(c, adminEmail); + + if (null == systemEPerson) { + systemEPerson = new EPerson(); + systemEPerson.setEmail(adminEmail); + } + + return systemEPerson; + } + @Override public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException { if (StringUtils.isNumeric(id)) { @@ -158,32 +185,98 @@ public List search(Context context, String query) throws SQLException { @Override public List search(Context context, String query, int offset, int limit) throws SQLException { - try { - List ePerson = new ArrayList<>(); - EPerson person = find(context, UUID.fromString(query)); + List ePersons = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by firstname & lastname (NOTE: email will also be included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + ePersons = ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), + Arrays.asList(firstNameField, lastNameField), offset, limit); + } else { + // Search by UUID + EPerson person = find(context, uuid); if (person != null) { - ePerson.add(person); 
+ ePersons.add(person); } - return ePerson; - } catch (IllegalArgumentException e) { + } + return ePersons; + } + + @Override + public int searchResultCount(Context context, String query) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Count results found by firstname & lastname (email is also included automatically) MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); if (StringUtils.isBlank(query)) { query = null; } - return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), - Arrays.asList(firstNameField, lastNameField), offset, limit); + result = ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField)); + } else { + // Search by UUID + EPerson person = find(context, uuid); + if (person != null) { + result = 1; + } } + return result; } @Override - public int searchResultCount(Context context, String query) throws SQLException { - MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); - MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); - if (StringUtils.isBlank(query)) { - query = null; + public List searchNonMembers(Context context, String query, Group excludeGroup, int offset, int limit) + throws SQLException { + List ePersons = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by firstname & lastname (NOTE: email will also be included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + ePersons = 
ePersonDAO.searchNotMember(context, query, Arrays.asList(firstNameField, lastNameField), + excludeGroup, Arrays.asList(firstNameField, lastNameField), + offset, limit); + } else { + // Search by UUID + EPerson person = find(context, uuid); + // Verify EPerson is NOT a member of the given excludeGroup before adding + if (person != null && !groupService.isDirectMember(excludeGroup, person)) { + ePersons.add(person); + } } - return ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField)); + + return ePersons; + } + + @Override + public int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Count results found by firstname & lastname (email is also included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + result = ePersonDAO.searchNotMemberCount(context, query, Arrays.asList(firstNameField, lastNameField), + excludeGroup); + } else { + // Search by UUID + EPerson person = find(context, uuid); + // Verify EPerson is NOT a member of the given excludeGroup before counting + if (person != null && !groupService.isDirectMember(excludeGroup, person)) { + result = 1; + } + } + return result; } @Override @@ -279,10 +372,13 @@ public void delete(Context context, EPerson ePerson, boolean cascade) throw new AuthorizeException( "You must be an admin to delete an EPerson"); } + // Get all workflow-related groups that the current EPerson belongs to Set workFlowGroups = getAllWorkFlowGroups(context, ePerson); for (Group group: workFlowGroups) { - List ePeople = groupService.allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Get total number of 
unique EPerson objs who are a member of this group (or subgroup) + int totalMembers = groupService.countAllMembers(context, group); + // If only one EPerson is a member, then we cannot delete the last member of this group. + if (totalMembers == 1) { throw new EmptyWorkflowGroupException(ePerson.getID(), group.getID()); } } @@ -541,14 +637,29 @@ public List getDeleteConstraints(Context context, EPerson ePerson) throw @Override public List findByGroups(Context c, Set groups) throws SQLException { + return findByGroups(c, groups, -1, -1); + } + + @Override + public List findByGroups(Context c, Set groups, int pageSize, int offset) throws SQLException { //Make sure we at least have one group, if not don't even bother searching. if (CollectionUtils.isNotEmpty(groups)) { - return ePersonDAO.findByGroups(c, groups); + return ePersonDAO.findByGroups(c, groups, pageSize, offset); } else { return new ArrayList<>(); } } + @Override + public int countByGroups(Context c, Set groups) throws SQLException { + //Make sure we at least have one group, if not don't even bother counting. 
+ if (CollectionUtils.isNotEmpty(groups)) { + return ePersonDAO.countByGroups(c, groups); + } else { + return 0; + } + } + @Override public List findEPeopleWithSubscription(Context context) throws SQLException { return ePersonDAO.findAllSubscribers(context); @@ -609,4 +720,8 @@ private List getDSpaceObjectOwnerMetadataValues(Item item) { public String getName(EPerson dso) { return dso.getName(); } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.ePersonDAO.exists(context, EPerson.class, id); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/Groomer.java b/dspace-api/src/main/java/org/dspace/eperson/Groomer.java index 2a828cdc12b4..5485bb1d0ca9 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Groomer.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Groomer.java @@ -141,20 +141,10 @@ private static void aging(CommandLine command) throws SQLException { System.out.println(); if (delete) { - List whyNot = ePersonService.getDeleteConstraints(myContext, account); - if (!whyNot.isEmpty()) { - System.out.print("\tCannot be deleted; referenced in"); - for (String table : whyNot) { - System.out.print(' '); - System.out.print(table); - } - System.out.println(); - } else { - try { - ePersonService.delete(myContext, account); - } catch (AuthorizeException | IOException ex) { - System.err.println(ex.getMessage()); - } + try { + ePersonService.delete(myContext, account); + } catch (AuthorizeException | IOException ex) { + System.err.println(ex.getMessage()); } } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/Group.java b/dspace-api/src/main/java/org/dspace/eperson/Group.java index 8fe4f94f9647..9cf810164425 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Group.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Group.java @@ -98,7 +98,11 @@ void addMember(EPerson e) { } /** - * Return EPerson members of a Group + * Return EPerson members of a Group. + *

      + * WARNING: This method may have bad performance for Groups with large numbers of EPerson members. + * Therefore, only use this when you need to access every EPerson member. Instead, consider using + * EPersonService.findByGroups() for a paginated list of EPersons. * * @return list of EPersons */ @@ -144,9 +148,13 @@ List getParentGroups() { } /** - * Return Group members of a Group. + * Return Group members (i.e. direct subgroups) of a Group. + *

      + * WARNING: This method may have bad performance for Groups with large numbers of Subgroups. + * Therefore, only use this when you need to access every Subgroup. Instead, consider using + * GroupService.findByParent() for a paginated list of Subgroups. * - * @return list of groups + * @return list of subgroups */ public List getMemberGroups() { return groups; diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 9fda372b4f2a..a05c73a070fa 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -179,8 +179,13 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S for (CollectionRole collectionRole : collectionRoles) { if (StringUtils.equals(collectionRole.getRoleId(), role.getId()) && claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) { - List ePeople = allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group)); + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, group); + // If this group has only one direct EPerson and *zero* child groups, then we cannot delete the + // EPerson or we will leave this group empty. 
+ if (totalDirectEPersons == 1 && totalChildGroups == 0) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -191,8 +196,13 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S } } if (!poolTasks.isEmpty()) { - List ePeople = allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group)); + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, group); + // If this group has only one direct EPerson and *zero* child groups, then we cannot delete the + // EPerson or we will leave this group empty. + if (totalDirectEPersons == 1 && totalChildGroups == 0) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -212,9 +222,13 @@ public void removeMember(Context context, Group groupParent, Group childGroup) t if (!collectionRoles.isEmpty()) { List poolTasks = poolTaskService.findByGroup(context, groupParent); if (!poolTasks.isEmpty()) { - List parentPeople = allMembers(context, groupParent); - List childPeople = allMembers(context, childGroup); - if (childPeople.containsAll(parentPeople)) { + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, groupParent); + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(groupParent)); + // If this group has only one childGroup and *zero* direct EPersons, then we cannot delete the + // childGroup or we will leave this group empty. 
+ if (totalChildGroups == 1 && totalDirectEPersons == 0) { throw new IllegalStateException( "Refused to remove sub group " + childGroup .getID() + " from workflow group because the group " + groupParent @@ -353,8 +367,6 @@ public Set allMemberGroupsSet(Context context, EPerson ePerson) throws SQ List groupCache = group2GroupCacheDAO.findByChildren(context, groups); // now we have all owning groups, also grab all parents of owning groups - // yes, I know this could have been done as one big query and a union, - // but doing the Oracle port taught me to keep to simple SQL! for (Group2GroupCache group2GroupCache : groupCache) { groups.add(group2GroupCache.getParent()); } @@ -370,7 +382,8 @@ public List allMembers(Context c, Group g) throws SQLException { // Get all groups which are a member of this group List group2GroupCaches = group2GroupCacheDAO.findByParent(c, g); - Set groups = new HashSet<>(); + // Initialize HashSet based on List size to avoid Set resizing. See https://stackoverflow.com/a/21822273 + Set groups = new HashSet<>((int) (group2GroupCaches.size() / 0.75 + 1)); for (Group2GroupCache group2GroupCache : group2GroupCaches) { groups.add(group2GroupCache.getChild()); } @@ -383,6 +396,23 @@ public List allMembers(Context c, Group g) throws SQLException { return new ArrayList<>(childGroupChildren); } + @Override + public int countAllMembers(Context context, Group group) throws SQLException { + // Get all groups which are a member of this group + List group2GroupCaches = group2GroupCacheDAO.findByParent(context, group); + // Initialize HashSet based on List size + current 'group' to avoid Set resizing. 
+ // See https://stackoverflow.com/a/21822273 + Set groups = new HashSet<>((int) ((group2GroupCaches.size() + 1) / 0.75 + 1)); + for (Group2GroupCache group2GroupCache : group2GroupCaches) { + groups.add(group2GroupCache.getChild()); + } + // Append current group as well + groups.add(group); + + // Return total number of unique EPerson objects in any of these groups + return ePersonService.countByGroups(context, groups); + } + @Override public Group find(Context context, UUID id) throws SQLException { if (id == null) { @@ -439,17 +469,17 @@ public List findAll(Context context, List metadataSortFiel } @Override - public List search(Context context, String groupIdentifier) throws SQLException { - return search(context, groupIdentifier, -1, -1); + public List search(Context context, String query) throws SQLException { + return search(context, query, -1, -1); } @Override - public List search(Context context, String groupIdentifier, int offset, int limit) throws SQLException { + public List search(Context context, String query, int offset, int limit) throws SQLException { List groups = new ArrayList<>(); - UUID uuid = UUIDUtils.fromString(groupIdentifier); + UUID uuid = UUIDUtils.fromString(query); if (uuid == null) { //Search by group name - groups = groupDAO.findByNameLike(context, groupIdentifier, offset, limit); + groups = groupDAO.findByNameLike(context, query, offset, limit); } else { //Search by group id Group group = find(context, uuid); @@ -462,12 +492,12 @@ public List search(Context context, String groupIdentifier, int offset, i } @Override - public int searchResultCount(Context context, String groupIdentifier) throws SQLException { + public int searchResultCount(Context context, String query) throws SQLException { int result = 0; - UUID uuid = UUIDUtils.fromString(groupIdentifier); + UUID uuid = UUIDUtils.fromString(query); if (uuid == null) { //Search by group name - result = groupDAO.countByNameLike(context, groupIdentifier); + result = 
groupDAO.countByNameLike(context, query); } else { //Search by group id Group group = find(context, uuid); @@ -479,6 +509,44 @@ public int searchResultCount(Context context, String groupIdentifier) throws SQL return result; } + @Override + public List searchNonMembers(Context context, String query, Group excludeParentGroup, + int offset, int limit) throws SQLException { + List groups = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by group name + groups = groupDAO.findByNameLikeAndNotMember(context, query, excludeParentGroup, offset, limit); + } else if (!uuid.equals(excludeParentGroup.getID())) { + // Search by group id + Group group = find(context, uuid); + // Verify it is NOT a member of the given excludeParentGroup before adding + if (group != null && !isMember(excludeParentGroup, group)) { + groups.add(group); + } + } + + return groups; + } + + @Override + public int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by group name + result = groupDAO.countByNameLikeAndNotMember(context, query, excludeParentGroup); + } else if (!uuid.equals(excludeParentGroup.getID())) { + // Search by group id + Group group = find(context, uuid); + // Verify it is NOT a member of the given excludeParentGroup before adding + if (group != null && !isMember(excludeParentGroup, group)) { + result = 1; + } + } + return result; + } + @Override public void delete(Context context, Group group) throws SQLException { if (group.isPermanent()) { @@ -840,4 +908,24 @@ public List findByMetadataField(final Context context, final String searc public String getName(Group dso) { return dso.getName(); } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.groupDAO.exists(context, Group.class, id); + } + + @Override + public List findByParent(Context context, Group 
parent, int pageSize, int offset) throws SQLException { + if (parent == null) { + return null; + } + return groupDAO.findByParent(context, parent, pageSize, offset); + } + + @Override + public int countByParent(Context context, Group parent) throws SQLException { + if (parent == null) { + return 0; + } + return groupDAO.countByParent(context, parent); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/RegistrationData.java b/dspace-api/src/main/java/org/dspace/eperson/RegistrationData.java index 953a3e8bd0a6..2c0e1abb8238 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/RegistrationData.java +++ b/dspace-api/src/main/java/org/dspace/eperson/RegistrationData.java @@ -10,9 +10,13 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; +import java.util.SortedSet; +import java.util.TreeSet; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; @@ -20,6 +24,7 @@ import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.ManyToMany; +import javax.persistence.OneToMany; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.Temporal; @@ -27,6 +32,7 @@ import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; +import org.hibernate.annotations.SortNatural; /** * Database entity representation of the registrationdata table @@ -43,30 +49,75 @@ public class RegistrationData implements ReloadableEntity { @SequenceGenerator(name = "registrationdata_seq", sequenceName = "registrationdata_seq", allocationSize = 1) private Integer id; - @Column(name = "email", unique = true, length = 64) + /** + * Contains the email used to register the user. 
+ */ + @Column(name = "email", length = 64) private String email; + /** + * Contains the unique id generated fot the user. + */ @Column(name = "token", length = 48) private String token; + /** + * Expiration date of this registration data. + */ @Column(name = "expires") @Temporal(TemporalType.TIMESTAMP) private Date expires; @ManyToMany(fetch = FetchType.LAZY, cascade = {CascadeType.PERSIST}) @JoinTable( - name = "registrationdata2group", - joinColumns = {@JoinColumn(name = "registrationdata_id")}, - inverseJoinColumns = {@JoinColumn(name = "group_id")} + name = "registrationdata2group", + joinColumns = {@JoinColumn(name = "registrationdata_id")}, + inverseJoinColumns = {@JoinColumn(name = "group_id")} ) private final List groups = new ArrayList(); + + /** + * Metadata linked to this registration data + */ + @SortNatural + @OneToMany( + fetch = FetchType.LAZY, + mappedBy = "registrationData", + cascade = CascadeType.ALL, + orphanRemoval = true + ) + private SortedSet metadata = new TreeSet<>(); + + /** + * External service used to register the user. + * Allowed values are inside {@link RegistrationTypeEnum} + */ + @Column(name = "registration_type") + @Enumerated(EnumType.STRING) + private RegistrationTypeEnum registrationType; + + /** + * Contains the external id provided by the external service + * accordingly to the registration type. 
+ */ + @Column(name = "net_id", length = 64) + private final String netId; + /** * Protected constructor, create object using: * {@link org.dspace.eperson.service.RegistrationDataService#create(Context)} */ protected RegistrationData() { + this(null); + } + /** + * Protected constructor, create object using: + * {@link org.dspace.eperson.service.RegistrationDataService#create(Context, String)} + */ + protected RegistrationData(String netId) { + this.netId = netId; } public Integer getID() { @@ -77,7 +128,7 @@ public String getEmail() { return email; } - void setEmail(String email) { + public void setEmail(String email) { this.email = email; } @@ -104,4 +155,24 @@ public List getGroups() { public void addGroup(Group group) { this.groups.add(group); } + + public RegistrationTypeEnum getRegistrationType() { + return registrationType; + } + + public void setRegistrationType(RegistrationTypeEnum registrationType) { + this.registrationType = registrationType; + } + + public SortedSet getMetadata() { + return metadata; + } + + public void setMetadata(SortedSet metadata) { + this.metadata = metadata; + } + + public String getNetId() { + return netId; + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataExpirationConfiguration.java b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataExpirationConfiguration.java new file mode 100644 index 000000000000..3bd8def0c448 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataExpirationConfiguration.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import java.text.MessageFormat; +import java.time.Duration; +import java.time.Instant; +import java.util.Date; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import 
java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class RegistrationDataExpirationConfiguration { + + private static final String EXPIRATION_PROP = "eperson.registration-data.token.{0}.expiration"; + private static final String DURATION_FORMAT = "PT{0}"; + + public static final RegistrationDataExpirationConfiguration INSTANCE = + new RegistrationDataExpirationConfiguration(); + + public static RegistrationDataExpirationConfiguration getInstance() { + return INSTANCE; + } + + private final Map expirationMap; + + private RegistrationDataExpirationConfiguration() { + this.expirationMap = + Stream.of(RegistrationTypeEnum.values()) + .map(type -> Optional.ofNullable(getDurationOf(type)) + .map(duration -> Map.entry(type, duration)) + .orElse(null) + ) + .filter(Objects::nonNull) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private Duration getDurationOf(RegistrationTypeEnum type) { + String format = MessageFormat.format(EXPIRATION_PROP, type.toString().toLowerCase()); + ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); + String typeValue = config.getProperty(format); + + if (StringUtils.isBlank(typeValue)) { + return null; + } + + return Duration.parse(MessageFormat.format(DURATION_FORMAT, typeValue)); + } + + public Duration getExpiration(RegistrationTypeEnum type) { + return expirationMap.get(type); + } + + public Date computeExpirationDate(RegistrationTypeEnum type) { + + if (type == null) { + return null; + } + + Duration duration = this.expirationMap.get(type); + + if (duration == null) { + return null; + } + + return Date.from(Instant.now().plus(duration)); + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataMetadata.java b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataMetadata.java new file mode 100644 index 000000000000..dde8428fe1fe --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataMetadata.java @@ -0,0 +1,109 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; + +import org.dspace.content.MetadataField; +import org.dspace.core.ReloadableEntity; +import org.hibernate.annotations.Type; + +/** + * Metadata related to a registration data {@link RegistrationData} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +@Entity +@Table(name = "registrationdata_metadata") +public class RegistrationDataMetadata implements ReloadableEntity, Comparable { + + @Id + @Column(name = "registrationdata_metadata_id") + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "registrationdata_metadatavalue_seq") + @SequenceGenerator( + name = "registrationdata_metadatavalue_seq", + sequenceName = "registrationdata_metadatavalue_seq", + allocationSize = 1 + ) + private final Integer id; + + /** + * {@link RegistrationData} linked to this metadata value + */ + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "registrationdata_id") + private RegistrationData registrationData = null; + + /** + * The linked {@link MetadataField} instance + 
*/ + @ManyToOne + @JoinColumn(name = "metadata_field_id") + private MetadataField metadataField = null; + + /** + * Value represented by this {@link RegistrationDataMetadata} instance + * related to the metadataField {@link MetadataField} + */ + @Lob + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Column(name = "text_value") + private String value = null; + + /** + * Protected constructor + */ + protected RegistrationDataMetadata() { + id = 0; + } + + + @Override + public Integer getID() { + return id; + } + + public MetadataField getMetadataField() { + return metadataField; + } + + void setMetadataField(MetadataField metadataField) { + this.metadataField = metadataField; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public int compareTo(RegistrationDataMetadata o) { + return Integer.compare(this.id, o.id); + } + + void setRegistrationData(RegistrationData registrationData) { + this.registrationData = registrationData; + } + + public RegistrationData getRegistrationData() { + return registrationData; + } +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataMetadataServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataMetadataServiceImpl.java new file mode 100644 index 000000000000..34f0e5590fad --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataMetadataServiceImpl.java @@ -0,0 +1,90 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.MetadataField; +import org.dspace.content.service.MetadataFieldService; +import org.dspace.core.Context; 
+import org.dspace.eperson.dao.RegistrationDataMetadataDAO; +import org.dspace.eperson.service.RegistrationDataMetadataService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class RegistrationDataMetadataServiceImpl implements RegistrationDataMetadataService { + + @Autowired + private RegistrationDataMetadataDAO registrationDataMetadataDAO; + + @Autowired + private MetadataFieldService metadataFieldService; + + @Override + public RegistrationDataMetadata create(Context context, RegistrationData registrationData, String schema, + String element, String qualifier, String value) throws SQLException { + return create( + context, registrationData, + metadataFieldService.findByElement(context, schema, element, qualifier), + value + ); + } + + @Override + public RegistrationDataMetadata create(Context context, RegistrationData registrationData, + MetadataField metadataField) throws SQLException { + RegistrationDataMetadata metadata = new RegistrationDataMetadata(); + metadata.setRegistrationData(registrationData); + metadata.setMetadataField(metadataField); + return registrationDataMetadataDAO.create(context, metadata); + } + + @Override + public RegistrationDataMetadata create( + Context context, RegistrationData registrationData, MetadataField metadataField, String value + ) throws SQLException { + RegistrationDataMetadata metadata = new RegistrationDataMetadata(); + metadata.setRegistrationData(registrationData); + metadata.setMetadataField(metadataField); + metadata.setValue(value); + return registrationDataMetadataDAO.create(context, metadata); + } + + @Override + public RegistrationDataMetadata create(Context context) throws SQLException, AuthorizeException { + return registrationDataMetadataDAO.create(context, new RegistrationDataMetadata()); + } + + @Override + public RegistrationDataMetadata find(Context context, int id) throws SQLException { + 
return registrationDataMetadataDAO.findByID(context, RegistrationData.class, id); + } + + @Override + public void update(Context context, RegistrationDataMetadata registrationDataMetadata) + throws SQLException, AuthorizeException { + registrationDataMetadataDAO.save(context, registrationDataMetadata); + } + + @Override + public void update(Context context, List t) throws SQLException, AuthorizeException { + for (RegistrationDataMetadata registrationDataMetadata : t) { + update(context, registrationDataMetadata); + } + } + + @Override + public void delete(Context context, RegistrationDataMetadata registrationDataMetadata) + throws SQLException, AuthorizeException { + registrationDataMetadataDAO.delete(context, registrationDataMetadata); + } +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataServiceImpl.java index b27275168556..4448cefb1bd8 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/RegistrationDataServiceImpl.java @@ -9,12 +9,26 @@ import java.sql.SQLException; import java.util.Collections; +import java.util.Date; import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.MetadataFieldService; import org.dspace.core.Context; +import org.dspace.core.Utils; +import org.dspace.core.exception.SQLRuntimeException; import org.dspace.eperson.dao.RegistrationDataDAO; +import org.dspace.eperson.dto.RegistrationDataChanges; +import org.dspace.eperson.dto.RegistrationDataPatch; +import 
org.dspace.eperson.service.RegistrationDataMetadataService; import org.dspace.eperson.service.RegistrationDataService; import org.springframework.beans.factory.annotation.Autowired; @@ -26,18 +40,66 @@ * @author kevinvandevelde at atmire.com */ public class RegistrationDataServiceImpl implements RegistrationDataService { - @Autowired(required = true) + @Autowired() protected RegistrationDataDAO registrationDataDAO; + @Autowired() + protected RegistrationDataMetadataService registrationDataMetadataService; + + @Autowired() + protected MetadataFieldService metadataFieldService; + + protected RegistrationDataExpirationConfiguration expirationConfiguration = + RegistrationDataExpirationConfiguration.getInstance(); + protected RegistrationDataServiceImpl() { } @Override public RegistrationData create(Context context) throws SQLException, AuthorizeException { - return registrationDataDAO.create(context, new RegistrationData()); + return create(context, null, null); + } + + + @Override + public RegistrationData create(Context context, String netId) throws SQLException, AuthorizeException { + return this.create(context, netId, null); + } + + @Override + public RegistrationData create(Context context, String netId, RegistrationTypeEnum type) + throws SQLException, AuthorizeException { + return registrationDataDAO.create(context, newInstance(netId, type, null)); } + private RegistrationData newInstance(String netId, RegistrationTypeEnum type, String email) { + RegistrationData rd = new RegistrationData(netId); + rd.setToken(Utils.generateHexKey()); + rd.setRegistrationType(type); + rd.setExpires(expirationConfiguration.computeExpirationDate(type)); + rd.setEmail(email); + return rd; + } + + @Override + public RegistrationData clone( + Context context, RegistrationDataPatch registrationDataPatch + ) throws SQLException, AuthorizeException { + RegistrationData old = registrationDataPatch.getOldRegistration(); + RegistrationDataChanges changes = 
registrationDataPatch.getChanges(); + RegistrationData rd = newInstance(old.getNetId(), changes.getRegistrationType(), changes.getEmail()); + + for (RegistrationDataMetadata metadata : old.getMetadata()) { + addMetadata(context, rd, metadata.getMetadataField(), metadata.getValue()); + } + + return registrationDataDAO.create(context, rd); + } + + private boolean isEmailConfirmed(RegistrationData old, String newEmail) { + return newEmail.equals(old.getEmail()); + } @Override public RegistrationData findByToken(Context context, String token) throws SQLException { @@ -49,12 +111,124 @@ public RegistrationData findByEmail(Context context, String email) throws SQLExc return registrationDataDAO.findByEmail(context, email); } + @Override + public RegistrationData findBy(Context context, String email, RegistrationTypeEnum type) throws SQLException { + return registrationDataDAO.findBy(context, email, type); + } + @Override public void deleteByToken(Context context, String token) throws SQLException { registrationDataDAO.deleteByToken(context, token); } + @Override + public Stream>> groupEpersonMetadataByRegistrationData( + EPerson ePerson, RegistrationData registrationData + ) + throws SQLException { + Map> epersonMeta = + ePerson.getMetadata() + .stream() + .collect( + Collectors.groupingBy( + MetadataValue::getMetadataField + ) + ); + return registrationData.getMetadata() + .stream() + .map(meta -> + Map.entry( + meta, + Optional.ofNullable(epersonMeta.get(meta.getMetadataField())) + .filter(list -> list.size() == 1) + .map(values -> values.get(0)) + ) + ); + } + + @Override + public void setRegistrationMetadataValue( + Context context, RegistrationData registration, String schema, String element, String qualifier, String value + ) throws SQLException, AuthorizeException { + + List metadata = + registration.getMetadata() + .stream() + .filter(m -> areEquals(m, schema, element, qualifier)) + .collect(Collectors.toList()); + + if (metadata.size() > 1) { + throw new 
IllegalStateException("Find more than one registration metadata to update!"); + } + + RegistrationDataMetadata registrationDataMetadata; + if (metadata.isEmpty()) { + registrationDataMetadata = + createMetadata(context, registration, schema, element, qualifier, value); + } else { + registrationDataMetadata = metadata.get(0); + registrationDataMetadata.setValue(value); + } + registrationDataMetadataService.update(context, registrationDataMetadata); + } + + @Override + public void addMetadata( + Context context, RegistrationData registration, MetadataField mf, String value + ) throws SQLException, AuthorizeException { + registration.getMetadata().add( + registrationDataMetadataService.create(context, registration, mf, value) + ); + this.update(context, registration); + } + + @Override + public void addMetadata( + Context context, RegistrationData registration, String schema, String element, String qualifier, String value + ) throws SQLException, AuthorizeException { + MetadataField mf = metadataFieldService.findByElement(context, schema, element, qualifier); + registration.getMetadata().add( + registrationDataMetadataService.create(context, registration, mf, value) + ); + this.update(context, registration); + } + + @Override + public RegistrationDataMetadata getMetadataByMetadataString(RegistrationData registrationData, String field) { + return registrationData.getMetadata().stream() + .filter(m -> field.equals(m.getMetadataField().toString('.'))) + .findFirst().orElse(null); + } + + private boolean areEquals(RegistrationDataMetadata m, String schema, String element, String qualifier) { + return m.getMetadataField().getMetadataSchema().equals(schema) + && m.getMetadataField().getElement().equals(element) + && StringUtils.equals(m.getMetadataField().getQualifier(), qualifier); + } + + private RegistrationDataMetadata createMetadata( + Context context, RegistrationData registration, + String schema, String element, String qualifier, + String value + ) { + try { + 
return registrationDataMetadataService.create( + context, registration, schema, element, qualifier, value + ); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + + private RegistrationDataMetadata createMetadata(Context context, RegistrationData registration, MetadataField mf) { + try { + return registrationDataMetadataService.create(context, registration, mf); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + @Override public RegistrationData find(Context context, int id) throws SQLException { return registrationDataDAO.findByID(context, RegistrationData.class, id); @@ -75,8 +249,25 @@ public void update(Context context, List registrationDataRecor } } + @Override + public void markAsExpired(Context context, RegistrationData registrationData) throws SQLException { + registrationData.setExpires(new Date()); + registrationDataDAO.save(context, registrationData); + } + @Override public void delete(Context context, RegistrationData registrationData) throws SQLException, AuthorizeException { registrationDataDAO.delete(context, registrationData); } + + @Override + public void deleteExpiredRegistrations(Context context) throws SQLException { + registrationDataDAO.deleteExpiredBy(context, new Date()); + } + + @Override + public boolean isValid(RegistrationData rd) { + return rd.getExpires() == null || rd.getExpires().after(new Date()); + } + } diff --git a/dspace-api/src/main/java/org/dspace/eperson/RegistrationTypeEnum.java b/dspace-api/src/main/java/org/dspace/eperson/RegistrationTypeEnum.java new file mode 100644 index 000000000000..28a594742f65 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/RegistrationTypeEnum.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +/** + * External 
provider allowed to register e-persons stored with {@link RegistrationData} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public enum RegistrationTypeEnum { + + ORCID("external-login"), + VALIDATION_ORCID("review-account"), + FORGOT("forgot"), + REGISTER("register"), + INVITATION("invitation"), + CHANGE_PASSWORD("change-password"); + + private final String link; + + RegistrationTypeEnum(String link) { + this.link = link; + } + + public String getLink() { + return link; + } +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java index 51ab89ef7e8f..f7543570dffb 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java @@ -33,12 +33,91 @@ public interface EPersonDAO extends DSpaceObjectDAO, DSpaceObjectLegacy public EPerson findByNetid(Context context, String netid) throws SQLException; + /** + * Search all EPersons by the given MetadataField objects, sorting by the given sort fields. + *

      + * NOTE: As long as a query is specified, the EPerson's email address is included in the search alongside any given + * metadata fields. + * + * @param context DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param sortFields the metadata field(s) to sort the results by + * @param offset the position of the first result to return + * @param limit how many results return + * @return List of matching EPerson objects + * @throws SQLException if an error occurs + */ public List search(Context context, String query, List queryFields, List sortFields, int offset, int limit) throws SQLException; + /** + * Count number of EPersons who match a search on the given metadata fields. This returns the count of total + * results for the same query using the 'search()', and therefore can be used to provide pagination. + * + * @param context DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @return total number of EPersons who match the query + * @throws SQLException if an error occurs + */ public int searchResultCount(Context context, String query, List queryFields) throws SQLException; - public List findByGroups(Context context, Set groups) throws SQLException; + /** + * Search all EPersons via their firstname, lastname, email (fuzzy match), limited to those EPersons which are NOT + * a member of the given group. This may be used to search across EPersons which are valid to add as members to the + * given group. + * + * @param context The DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. 
+ * @param offset the position of the first result to return + * @param limit how many results return + * @return EPersons matching the query (which are not members of the given group) + * @throws SQLException if database error + */ + List searchNotMember(Context context, String query, List queryFields, Group excludeGroup, + List sortFields, int offset, int limit) throws SQLException; + + /** + * Count number of EPersons that match a given search (fuzzy match) across firstname, lastname and email. This + * search is limited to those EPersons which are NOT a member of the given group. This may be used + * (with searchNotMember()) to perform a paginated search across EPersons which are valid to add to the given group. + * + * @param context The DSpace context + * @param query querystring to fuzzy match against. + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + int searchNotMemberCount(Context context, String query, List queryFields, Group excludeGroup) + throws SQLException; + + /** + * Find all EPersons who are a member of one or more of the listed groups in a paginated fashion. This returns + * EPersons ordered by UUID. + * + * @param context current Context + * @param groups Set of group(s) to check membership in + * @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination + * @param offset number of page to load (starting with 1). Set to <=0 to disable pagination + * @return List of all EPersons who are a member of one or more groups. 
+ * @throws SQLException + */ + List findByGroups(Context context, Set groups, int pageSize, int offset) throws SQLException; + + /** + * Count total number of EPersons who are a member of one or more of the listed groups. This provides the total + * number of results to expect from corresponding findByGroups() for pagination purposes. + * + * @param context current Context + * @param groups Set of group(s) to check membership in + * @return total number of (unique) EPersons who are a member of one or more groups. + * @throws SQLException + */ + int countByGroups(Context context, Set groups) throws SQLException; public List findWithPasswordWithoutDigestAlgorithm(Context context) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java index e844d8cf2c5b..a888a4c03956 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java @@ -145,6 +145,38 @@ List findAll(Context context, List metadataSortFields, int */ int countByNameLike(Context context, String groupName) throws SQLException; + /** + * Search all groups via their name (fuzzy match), limited to those groups which are NOT a member of the given + * parent group. This may be used to search across groups which are valid to add to the given parent group. + *

      + * NOTE: The parent group itself is also excluded from the search. + * + * @param context The DSpace context + * @param groupName Group name to fuzzy match against. + * @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned. + * @param offset Offset to use for pagination (-1 to disable) + * @param limit The maximum number of results to return (-1 to disable) + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + List findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent, + int offset, int limit) throws SQLException; + + /** + * Count number of groups that match a given name (fuzzy match), limited to those groups which are NOT a member of + * the given parent group. This may be used (with findByNameLikeAndNotMember()) to search across groups which are + * valid to add to the given parent group. + *

      + * NOTE: The parent group itself is also excluded from the count. + * + * @param context The DSpace context + * @param groupName Group name to fuzzy match against. + * @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned. + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException; + /** * Find a group by its name and the membership of the given EPerson * @@ -156,4 +188,28 @@ List findAll(Context context, List metadataSortFields, int */ Group findByIdAndMembership(Context context, UUID id, EPerson ePerson) throws SQLException; + /** + * Find all groups which are members of a given parent group. + * This provides the same behavior as group.getMemberGroups(), but in a paginated fashion. + * + * @param context The DSpace context + * @param parent Parent Group to search within + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return Groups matching the query + * @throws SQLException if database error + */ + List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException; + + /** + * Returns the number of groups which are members of a given parent group. + * This provides the same behavior as group.getMemberGroups().size(), but with better performance for large groups. + * This method may be used with findByParent() to perform pagination. 
+ * + * @param context The DSpace context + * @param parent Parent Group to search within + * @return Number of Groups matching the query + * @throws SQLException if database error + */ + int countByParent(Context context, Group parent) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/RegistrationDataDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/RegistrationDataDAO.java index 5650c5e5b2be..0bdd6cc17cf8 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/RegistrationDataDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/RegistrationDataDAO.java @@ -8,10 +8,12 @@ package org.dspace.eperson.dao; import java.sql.SQLException; +import java.util.Date; import org.dspace.core.Context; import org.dspace.core.GenericDAO; import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; /** * Database Access Object interface class for the RegistrationData object. @@ -23,9 +25,52 @@ */ public interface RegistrationDataDAO extends GenericDAO { + /** + * Finds {@link RegistrationData} by email. + * + * @param context Context for the current request + * @param email The email + * @return + * @throws SQLException + */ public RegistrationData findByEmail(Context context, String email) throws SQLException; + /** + * Finds {@link RegistrationData} by email and type. + * + * @param context Context for the current request + * @param email The email + * @param type The type of the {@link RegistrationData} + * @return + * @throws SQLException + */ + public RegistrationData findBy(Context context, String email, RegistrationTypeEnum type) throws SQLException; + + /** + * Finds {@link RegistrationData} by token. + * + * @param context the context + * @param token The token related to the {@link RegistrationData}. + * @return + * @throws SQLException + */ public RegistrationData findByToken(Context context, String token) throws SQLException; + /** + * Deletes {@link RegistrationData} by token. 
+ * + * @param context Context for the current request + * @param token The token to delete registrations for + * @throws SQLException + */ public void deleteByToken(Context context, String token) throws SQLException; + + /** + * Deletes expired {@link RegistrationData}. + * + * @param context Context for the current request + * @param date The date to delete expired registrations for + * @throws SQLException + */ + void deleteExpiredBy(Context context, Date date) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/RegistrationDataMetadataDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/RegistrationDataMetadataDAO.java new file mode 100644 index 000000000000..84ef2989cc45 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/RegistrationDataMetadataDAO.java @@ -0,0 +1,22 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.dao; + +import org.dspace.core.GenericDAO; +import org.dspace.eperson.RegistrationDataMetadata; + +/** + * Database Access Object interface class for the {@link org.dspace.eperson.RegistrationDataMetadata} object. 
+ * The implementation of this class is responsible for all database calls for the RegistrationData object and is + * autowired by spring + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public interface RegistrationDataMetadataDAO extends GenericDAO { + +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java index 50547a500745..87d6c5869b09 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java @@ -70,17 +70,9 @@ public List search(Context context, String query, List q String queryString = "SELECT " + EPerson.class.getSimpleName() .toLowerCase() + " FROM EPerson as " + EPerson.class .getSimpleName().toLowerCase() + " "; - if (query != null) { - query = "%" + query.toLowerCase() + "%"; - } - Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, sortFields, null); - if (0 <= offset) { - hibernateQuery.setFirstResult(offset); - } - if (0 <= limit) { - hibernateQuery.setMaxResults(limit); - } + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, null, + sortFields, null, limit, offset); return list(hibernateQuery); } @@ -92,6 +84,28 @@ public int searchResultCount(Context context, String query, List return count(hibernateQuery); } + @Override + public List searchNotMember(Context context, String query, List queryFields, + Group excludeGroup, List sortFields, + int offset, int limit) throws SQLException { + String queryString = "SELECT " + EPerson.class.getSimpleName() + .toLowerCase() + " FROM EPerson as " + EPerson.class + .getSimpleName().toLowerCase() + " "; + + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup, + sortFields, null, limit, offset); + return list(hibernateQuery); + } + + public int 
searchNotMemberCount(Context context, String query, List queryFields, + Group excludeGroup) throws SQLException { + String queryString = "SELECT count(*) FROM EPerson as " + EPerson.class.getSimpleName().toLowerCase(); + + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup, + Collections.EMPTY_LIST, null, -1, -1); + return count(hibernateQuery); + } + @Override public List findAll(Context context, MetadataField metadataSortField, String sortField, int pageSize, int offset) throws SQLException { @@ -105,14 +119,15 @@ public List findAll(Context context, MetadataField metadataSortField, S sortFields = Collections.singletonList(metadataSortField); } - Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, sortFields, sortField, pageSize, - offset); + Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, null, + sortFields, sortField, pageSize, offset); return list(query); } @Override - public List findByGroups(Context context, Set groups) throws SQLException { + public List findByGroups(Context context, Set groups, int pageSize, int offset) + throws SQLException { Query query = createQuery(context, "SELECT DISTINCT e FROM EPerson e " + "JOIN e.groups g " + @@ -122,12 +137,35 @@ public List findByGroups(Context context, Set groups) throws SQL for (Group group : groups) { idList.add(group.getID()); } - query.setParameter("idList", idList); + if (pageSize > 0) { + query.setMaxResults(pageSize); + } + if (offset > 0) { + query.setFirstResult(offset); + } + return list(query); } + @Override + public int countByGroups(Context context, Set groups) throws SQLException { + Query query = createQuery(context, + "SELECT count(DISTINCT e) FROM EPerson e " + + "JOIN e.groups g " + + "WHERE g.id IN (:idList) "); + + List idList = new ArrayList<>(groups.size()); + for (Group group : groups) { + idList.add(group.getID()); + } + + query.setParameter("idList", idList); + + return count(query); + } 
+ @Override public List findWithPasswordWithoutDigestAlgorithm(Context context) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); @@ -154,43 +192,88 @@ public List findNotActiveSince(Context context, Date date) throws SQLEx protected Query getSearchQuery(Context context, String queryString, String queryParam, List queryFields, List sortFields, String sortField) throws SQLException { - return getSearchQuery(context, queryString, queryParam, queryFields, sortFields, sortField, -1, -1); + return getSearchQuery(context, queryString, queryParam, queryFields, null, sortFields, sortField, -1, -1); } + /** + * Build a search query across EPersons based on the given metadata fields and sorted based on the given metadata + * field(s) or database column. + *

      + * NOTE: the EPerson's email address is included in the search alongside any given metadata fields. + * + * @param context DSpace Context + * @param queryString String which defines the beginning "SELECT" for the SQL query + * @param queryParam Actual text being searched for + * @param queryFields List of metadata fields to search within + * @param excludeGroup Optional Group which should be excluded from search. Any EPersons who are members + * of this group will not be included in the results. + * @param sortFields Optional List of metadata fields to sort by (should not be specified if sortField is used) + * @param sortField Optional database column to sort on (should not be specified if sortFields is used) + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return built Query object + * @throws SQLException if error occurs + */ protected Query getSearchQuery(Context context, String queryString, String queryParam, - List queryFields, List sortFields, String sortField, - int pageSize, int offset) throws SQLException { - + List queryFields, Group excludeGroup, + List sortFields, String sortField, + int pageSize, int offset) throws SQLException { + // Initialize SQL statement using the passed in "queryString" StringBuilder queryBuilder = new StringBuilder(); queryBuilder.append(queryString); + Set metadataFieldsToJoin = new LinkedHashSet<>(); metadataFieldsToJoin.addAll(queryFields); metadataFieldsToJoin.addAll(sortFields); + // Append necessary join information for MetadataFields we will search within if (!CollectionUtils.isEmpty(metadataFieldsToJoin)) { addMetadataLeftJoin(queryBuilder, EPerson.class.getSimpleName().toLowerCase(), metadataFieldsToJoin); } - if (queryParam != null) { + // Always append a search on EPerson "email" based on query + if (StringUtils.isNotBlank(queryParam)) { addMetadataValueWhereQuery(queryBuilder, queryFields, "like", EPerson.class.getSimpleName().toLowerCase() + ".email 
like :queryParam"); } + // If excludeGroup is specified, exclude members of that group from results + // This uses a subquery to find the excluded group & verify that it is not in the EPerson list of "groups" + if (excludeGroup != null) { + // If query params exist, then we already have a WHERE clause (see above) and just need to append an AND + if (StringUtils.isNotBlank(queryParam)) { + queryBuilder.append(" AND "); + } else { + // no WHERE clause yet, so this is the start of the WHERE + queryBuilder.append(" WHERE "); + } + queryBuilder.append("(FROM Group g where g.id = :group_id) NOT IN elements (") + .append(EPerson.class.getSimpleName().toLowerCase()).append(".groups)"); + } + // Add sort/order by info to query, if specified if (!CollectionUtils.isEmpty(sortFields) || StringUtils.isNotBlank(sortField)) { addMetadataSortQuery(queryBuilder, sortFields, Collections.singletonList(sortField)); } + // Create the final SQL SELECT statement (based on included params above) Query query = createQuery(context, queryBuilder.toString()); + // Set pagesize & offset for pagination if (pageSize > 0) { query.setMaxResults(pageSize); } if (offset > 0) { query.setFirstResult(offset); } + // Set all parameters to the SQL SELECT statement (based on included params above) if (StringUtils.isNotBlank(queryParam)) { query.setParameter("queryParam", "%" + queryParam.toLowerCase() + "%"); } for (MetadataField metadataField : metadataFieldsToJoin) { query.setParameter(metadataField.toString(), metadataField.getID()); } + if (excludeGroup != null) { + query.setParameter("group_id", excludeGroup.getID()); + } + + query.setHint("org.hibernate.cacheable", Boolean.TRUE); return query; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java index 5b71e943563a..747665860dce 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java @@ -171,6 +171,41 @@ public int countByNameLike(final Context context, final String groupName) throws return count(query); } + @Override + public List findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent, + int offset, int limit) throws SQLException { + Query query = createQuery(context, + "FROM Group " + + "WHERE lower(name) LIKE lower(:group_name) " + + "AND id != :parent_id " + + "AND (from Group g where g.id = :parent_id) not in elements (parentGroups)"); + query.setParameter("parent_id", excludeParent.getID()); + query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%"); + + if (0 <= offset) { + query.setFirstResult(offset); + } + if (0 <= limit) { + query.setMaxResults(limit); + } + query.setHint("org.hibernate.cacheable", Boolean.TRUE); + + return list(query); + } + + @Override + public int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException { + Query query = createQuery(context, + "SELECT count(*) FROM Group " + + "WHERE lower(name) LIKE lower(:group_name) " + + "AND id != :parent_id " + + "AND (from Group g where g.id = :parent_id) not in elements (parentGroups)"); + query.setParameter("parent_id", excludeParent.getID()); + query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%"); + + return count(query); + } + @Override public void delete(Context context, Group group) throws SQLException { Query query = getHibernateSession(context) @@ -203,4 +238,29 @@ public int countRows(Context context) throws SQLException { return count(createQuery(context, "SELECT count(*) FROM Group")); } + @Override + public List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException { + Query query = createQuery(context, + "SELECT g FROM Group g JOIN g.parentGroups pg " + + "WHERE pg.id = :parent_id"); + query.setParameter("parent_id", 
parent.getID()); + if (pageSize > 0) { + query.setMaxResults(pageSize); + } + if (offset > 0) { + query.setFirstResult(offset); + } + query.setHint("org.hibernate.cacheable", Boolean.TRUE); + + return list(query); + } + + @Override + public int countByParent(Context context, Group parent) throws SQLException { + Query query = createQuery(context, "SELECT count(g) FROM Group g JOIN g.parentGroups pg " + + "WHERE pg.id = :parent_id"); + query.setParameter("parent_id", parent.getID()); + + return count(query); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java index 4a15dcc86796..2dd023580dc8 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java @@ -8,8 +8,10 @@ package org.dspace.eperson.dao.impl; import java.sql.SQLException; +import java.util.Date; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.CriteriaDelete; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root; @@ -17,6 +19,7 @@ import org.dspace.core.Context; import org.dspace.eperson.RegistrationData; import org.dspace.eperson.RegistrationData_; +import org.dspace.eperson.RegistrationTypeEnum; import org.dspace.eperson.dao.RegistrationDataDAO; /** @@ -42,6 +45,21 @@ public RegistrationData findByEmail(Context context, String email) throws SQLExc return uniqueResult(context, criteriaQuery, false, RegistrationData.class); } + @Override + public RegistrationData findBy(Context context, String email, RegistrationTypeEnum type) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RegistrationData.class); + Root registrationDataRoot = 
criteriaQuery.from(RegistrationData.class); + criteriaQuery.select(registrationDataRoot); + criteriaQuery.where( + criteriaBuilder.and( + criteriaBuilder.equal(registrationDataRoot.get(RegistrationData_.email), email), + criteriaBuilder.equal(registrationDataRoot.get(RegistrationData_.registrationType), type) + ) + ); + return uniqueResult(context, criteriaQuery, false, RegistrationData.class); + } + @Override public RegistrationData findByToken(Context context, String token) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); @@ -59,4 +77,15 @@ public void deleteByToken(Context context, String token) throws SQLException { query.setParameter("token", token); query.executeUpdate(); } + + @Override + public void deleteExpiredBy(Context context, Date date) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaDelete deleteQuery = criteriaBuilder.createCriteriaDelete(RegistrationData.class); + Root deleteRoot = deleteQuery.from(RegistrationData.class); + deleteQuery.where( + criteriaBuilder.lessThanOrEqualTo(deleteRoot.get(RegistrationData_.expires), date) + ); + getHibernateSession(context).createQuery(deleteQuery).executeUpdate(); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataMetadataDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataMetadataDAOImpl.java new file mode 100644 index 000000000000..713032b05bbc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataMetadataDAOImpl.java @@ -0,0 +1,19 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.dao.impl; + +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.eperson.RegistrationDataMetadata; +import 
org.dspace.eperson.dao.RegistrationDataMetadataDAO; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class RegistrationDataMetadataDAOImpl extends AbstractHibernateDAO + implements RegistrationDataMetadataDAO { +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/dto/RegistrationDataChanges.java b/dspace-api/src/main/java/org/dspace/eperson/dto/RegistrationDataChanges.java new file mode 100644 index 000000000000..431fa8496861 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/dto/RegistrationDataChanges.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.dto; + +import org.dspace.eperson.RegistrationTypeEnum; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class RegistrationDataChanges { + + private static final String EMAIL_PATTERN = + "[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)" + + "+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?"; + + private final String email; + private final RegistrationTypeEnum registrationType; + + public RegistrationDataChanges(String email, RegistrationTypeEnum type) { + if (email == null || email.trim().isBlank()) { + throw new IllegalArgumentException("Cannot update with an empty email address"); + } + if (type == null) { + throw new IllegalArgumentException("Cannot update with a null registration type"); + } + this.email = email; + if (!isValidEmail()) { + throw new IllegalArgumentException("Invalid email address provided!"); + } + this.registrationType = type; + } + + public boolean isValidEmail() { + return email.matches(EMAIL_PATTERN); + } + + public String getEmail() { + return email; + } + + public RegistrationTypeEnum getRegistrationType() { + 
return registrationType; + } +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/dto/RegistrationDataPatch.java b/dspace-api/src/main/java/org/dspace/eperson/dto/RegistrationDataPatch.java new file mode 100644 index 000000000000..e681193d3dd2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/dto/RegistrationDataPatch.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.dto; + +import org.dspace.eperson.RegistrationData; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class RegistrationDataPatch { + + private final RegistrationData oldRegistration; + private final RegistrationDataChanges changes; + + public RegistrationDataPatch(RegistrationData oldRegistration, RegistrationDataChanges changes) { + this.oldRegistration = oldRegistration; + this.changes = changes; + } + + public RegistrationData getOldRegistration() { + return oldRegistration; + } + + public RegistrationDataChanges getChanges() { + return changes; + } +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java index 2cc0c8c355ef..ebfa7fc89d91 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java @@ -16,6 +16,8 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.dto.RegistrationDataPatch; /** * Methods for handling registration by email and forgotten passwords. 
When @@ -39,6 +41,10 @@ public void sendRegistrationInfo(Context context, String email, List group public void sendForgotPasswordInfo(Context context, String email, List groups) throws SQLException, IOException, MessagingException, AuthorizeException; + boolean existsAccountFor(Context context, String token) throws SQLException, AuthorizeException; + + boolean existsAccountWithEmail(Context context, String email) throws SQLException; + public EPerson getEPerson(Context context, String token) throws SQLException, AuthorizeException; @@ -48,4 +54,14 @@ public String getEmail(Context context, String token) public void deleteToken(Context context, String token) throws SQLException; + + EPerson mergeRegistration(Context context, UUID userId, String token, List overrides) + throws AuthorizeException, SQLException; + + RegistrationData renewRegistrationForEmail( + Context context, RegistrationDataPatch registrationDataPatch + ) throws AuthorizeException; + + + boolean isTokenValidForCreation(RegistrationData registrationData); } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index 010e5879dae6..ab12765ee960 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -13,6 +13,7 @@ import java.util.Date; import java.util.List; import java.util.Set; +import javax.validation.constraints.NotNull; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; @@ -99,9 +100,9 @@ public List search(Context context, String query) * * @param context The relevant DSpace Context. 
* @param query The search string - * @param offset Inclusive offset + * @param offset Inclusive offset (the position of the first result to return) * @param limit Maximum number of matches returned - * @return array of EPerson objects + * @return List of matching EPerson objects * @throws SQLException An exception that provides information on a database access error or other errors. */ public List search(Context context, String query, int offset, int limit) @@ -119,6 +120,34 @@ public List search(Context context, String query, int offset, int limit public int searchResultCount(Context context, String query) throws SQLException; + /** + * Find the EPersons that match the search query which are NOT currently members of the given Group. The search + * query is run against firstname, lastname or email. + * + * @param context DSpace context + * @param query The search string + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching EPerson objects + * @throws SQLException if error + */ + List searchNonMembers(Context context, String query, Group excludeGroup, + int offset, int limit) throws SQLException; + + /** + * Returns the total number of EPersons that match the search query which are NOT currently members of the given + * Group. The search query is run against firstname, lastname or email. Can be used with searchNonMembers() to + * support pagination + * + * @param context DSpace context + * @param query The search string + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. 
+ * @return total number of matching EPerson objects + * @throws SQLException if error + */ + int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException; + /** * Find all the {@code EPerson}s in a specific order by field. * The sortable fields are: @@ -159,6 +188,19 @@ public List findAll(Context context, int sortField) public List findAll(Context context, int sortField, int pageSize, int offset) throws SQLException; + /** + * The "System EPerson" is a fake account that exists only to receive email. + * It has an email address that should be presumed usable. It does not + * exist in the database and is not complete. + * + * @param context current DSpace session. + * @return an EPerson that can presumably receive email. + * @throws SQLException + */ + @NotNull + public EPerson getSystemEPerson(Context context) + throws SQLException; + /** * Create a new eperson * @@ -240,14 +282,42 @@ public EPerson create(Context context) throws SQLException, public List getDeleteConstraints(Context context, EPerson ePerson) throws SQLException; /** - * Retrieve all accounts which belong to at least one of the specified groups. + * Retrieve all EPerson accounts which belong to at least one of the specified groups. + *

      + * WARNING: This method may have bad performance issues for Groups with a very large number of members, + * as it will load all member EPerson objects into memory. + *

      + * For better performance, use the paginated version of this method. * * @param c The relevant DSpace Context. * @param groups set of eperson groups * @return a list of epeople * @throws SQLException An exception that provides information on a database access error or other errors. */ - public List findByGroups(Context c, Set groups) throws SQLException; + List findByGroups(Context c, Set groups) throws SQLException; + + /** + * Retrieve all EPerson accounts which belong to at least one of the specified groups, in a paginated fashion. + * + * @param c The relevant DSpace Context. + * @param groups Set of group(s) to check membership in + * @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination + * @param offset number of page to load (starting with 1). Set to <=0 to disable pagination + * @return a list of epeople + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + List findByGroups(Context c, Set groups, int pageSize, int offset) throws SQLException; + + /** + * Count all EPerson accounts which belong to at least one of the specified groups. This provides the total + * number of results to expect from corresponding findByGroups() for pagination purposes. + * + * @param c The relevant DSpace Context. + * @param groups Set of group(s) to check membership in + * @return total number of (unique) EPersons who are a member of one or more groups. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + int countByGroups(Context c, Set groups) throws SQLException; /** * Retrieve all accounts which are subscribed to receive information about new items. 
diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java index defaa9a745ab..acd98495c0d2 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java @@ -189,9 +189,11 @@ public interface GroupService extends DSpaceObjectService, DSpaceObjectLe Set allMemberGroupsSet(Context context, EPerson ePerson) throws SQLException; /** - * Get all of the epeople who are a member of the - * specified group, or a member of a sub-group of the + * Get all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the * specified group, etc. + *

      + * WARNING: This method may have bad performance for Groups with a very large number of members, as it will load + * all member EPerson objects into memory. Only use if you need access to *every* EPerson object at once. * * @param context The relevant DSpace Context. * @param group Group object @@ -200,6 +202,18 @@ public interface GroupService extends DSpaceObjectService, DSpaceObjectLe */ public List allMembers(Context context, Group group) throws SQLException; + /** + * Count all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the + * specified group, etc. + * In other words, this will return the size of "allMembers()" without having to load all EPerson objects into + * memory. + * @param context current DSpace context + * @param group Group object + * @return count of EPerson object members + * @throws SQLException if error + */ + int countAllMembers(Context context, Group group) throws SQLException; + /** * Find the group by its name - assumes name is unique * @@ -258,37 +272,67 @@ public List findAll(Context context, List metadataSortFiel public List findAll(Context context, int sortField) throws SQLException; /** - * Find the groups that match the search query across eperson_group_id or name + * Find the Groups that match the query across both Group name and Group ID. This is an unpaginated search, + * which means it will load all matching groups into memory at once. This may provide POOR PERFORMANCE when a large + * number of groups are matched. 
* - * @param context DSpace context - * @param groupIdentifier The group name or group ID - * @return array of Group objects + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @return List of matching Group objects * @throws SQLException if error */ - public List search(Context context, String groupIdentifier) throws SQLException; + List search(Context context, String query) throws SQLException; /** - * Find the groups that match the search query across eperson_group_id or name + * Find the Groups that match the query across both Group name and Group ID. This method supports pagination, + * which provides better performance than the above non-paginated search() method. * - * @param context DSpace context - * @param groupIdentifier The group name or group ID - * @param offset Inclusive offset - * @param limit Maximum number of matches returned - * @return array of Group objects + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching Group objects * @throws SQLException if error */ - public List search(Context context, String groupIdentifier, int offset, int limit) throws SQLException; + List search(Context context, String query, int offset, int limit) throws SQLException; /** - * Returns the total number of groups returned by a specific query, without the overhead - * of creating the Group objects to store the results. + * Returns the total number of Groups returned by a specific query. Search is performed based on Group name + * and Group ID. May be used with search() above to support pagination of matching Groups. 
* * @param context DSpace context - * @param query The search string + * @param query The search string used to search across group name or group ID * @return the number of groups matching the query * @throws SQLException if error */ - public int searchResultCount(Context context, String query) throws SQLException; + int searchResultCount(Context context, String query) throws SQLException; + + /** + * Find the groups that match the search query which are NOT currently members (subgroups) + * of the given parentGroup + * + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param excludeParentGroup Parent group to exclude results from + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching Group objects + * @throws SQLException if error + */ + List searchNonMembers(Context context, String query, Group excludeParentGroup, + int offset, int limit) throws SQLException; + + /** + * Returns the total number of groups that match the search query which are NOT currently members (subgroups) + * of the given parentGroup. Can be used with searchNonMembers() to support pagination. 
+ * + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param excludeParentGroup Parent group to exclude results from + * @return the number of Groups matching the query + * @throws SQLException if error + */ + int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException; /** * Return true if group has no direct or indirect members @@ -338,4 +382,29 @@ public List findAll(Context context, List metadataSortFiel */ List findByMetadataField(Context context, String searchValue, MetadataField metadataField) throws SQLException; + + /** + * Find all groups which are a member of the given Parent group + * + * @param context The relevant DSpace Context. + * @param parent The parent Group to search on + * @param pageSize how many results to return + * @param offset the position of the first result to return + * @return List of all groups which are members of the parent group + * @throws SQLException database exception if error + */ + List findByParent(Context context, Group parent, int pageSize, int offset) + throws SQLException; + + /** + * Return number of groups which are a member of the given Parent group. + * Can be used with findByParent() for pagination of all groups within a given Parent group. + * + * @param context The relevant DSpace Context. 
+ * @param parent The parent Group to search on + * @return number of groups which are members of the parent group + * @throws SQLException database exception if error + */ + int countByParent(Context context, Group parent) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/RegistrationDataMetadataService.java b/dspace-api/src/main/java/org/dspace/eperson/service/RegistrationDataMetadataService.java new file mode 100644 index 000000000000..b547c4fca80b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/service/RegistrationDataMetadataService.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.service; + +import java.sql.SQLException; + +import org.dspace.content.MetadataField; +import org.dspace.core.Context; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationDataMetadata; +import org.dspace.service.DSpaceCRUDService; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public interface RegistrationDataMetadataService extends DSpaceCRUDService { + + RegistrationDataMetadata create(Context context, RegistrationData registrationData, String schema, + String element, String qualifier, String value) throws SQLException; + + RegistrationDataMetadata create( + Context context, RegistrationData registrationData, MetadataField metadataField + ) throws SQLException; + + RegistrationDataMetadata create( + Context context, RegistrationData registrationData, MetadataField metadataField, String value + ) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/RegistrationDataService.java b/dspace-api/src/main/java/org/dspace/eperson/service/RegistrationDataService.java index d1e78fa2bce2..f10da961ca48 
100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/RegistrationDataService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/RegistrationDataService.java @@ -8,13 +8,23 @@ package org.dspace.eperson.service; import java.sql.SQLException; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Stream; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationDataMetadata; +import org.dspace.eperson.RegistrationTypeEnum; +import org.dspace.eperson.dto.RegistrationDataPatch; import org.dspace.service.DSpaceCRUDService; /** - * Service interface class for the RegistrationData object. + * Service interface class for the {@link RegistrationData} object. * The implementation of this class is responsible for all business logic calls for the RegistrationData object and * is autowired by spring * @@ -22,10 +32,45 @@ */ public interface RegistrationDataService extends DSpaceCRUDService { + RegistrationData create(Context context) throws SQLException, AuthorizeException; + + RegistrationData create(Context context, String netId) throws SQLException, AuthorizeException; + + RegistrationData create(Context context, String netId, RegistrationTypeEnum type) + throws SQLException, AuthorizeException; + + RegistrationData clone( + Context context, RegistrationDataPatch registrationDataPatch + ) throws SQLException, AuthorizeException; + public RegistrationData findByToken(Context context, String token) throws SQLException; public RegistrationData findByEmail(Context context, String email) throws SQLException; + RegistrationData findBy(Context context, String email, RegistrationTypeEnum type) throws SQLException; + public void deleteByToken(Context context, String token) throws SQLException; + Stream>> 
groupEpersonMetadataByRegistrationData( + EPerson ePerson, RegistrationData registrationData + ) throws SQLException; + + void setRegistrationMetadataValue( + Context context, RegistrationData registration, String schema, String element, String qualifier, String value + ) throws SQLException, AuthorizeException; + + void addMetadata( + Context context, RegistrationData registration, String schema, String element, String qualifier, String value + ) throws SQLException, AuthorizeException; + + RegistrationDataMetadata getMetadataByMetadataString(RegistrationData registrationData, String field); + + void addMetadata(Context context, RegistrationData rd, MetadataField metadataField, String value) + throws SQLException, AuthorizeException; + + void markAsExpired(Context context, RegistrationData registrationData) throws SQLException, AuthorizeException; + + void deleteExpiredRegistrations(Context context) throws SQLException; + + boolean isValid(RegistrationData rd); } diff --git a/dspace-api/src/main/java/org/dspace/event/Consumer.java b/dspace-api/src/main/java/org/dspace/event/Consumer.java index 1a8b16e98a0b..f56efcc7bacb 100644 --- a/dspace-api/src/main/java/org/dspace/event/Consumer.java +++ b/dspace-api/src/main/java/org/dspace/event/Consumer.java @@ -10,18 +10,16 @@ import org.dspace.core.Context; /** - * Interface for content event consumers. Note that the consumer cannot tell if - * it is invoked synchronously or asynchronously; the consumer interface and - * sequence of calls is the same for both. Asynchronous consumers may see more - * consume() calls between the start and end of the event stream, if they are - * invoked asynchronously, once in a long time period, rather than synchronously - * after every Context.commit(). - * - * @version $Revision$ + * Interface for content event consumers. Note that the consumer cannot tell + * if it is invoked synchronously or asynchronously; the consumer interface + * and sequence of calls is the same for both. 
Asynchronous consumers may see + * more consume() calls between the start and end of the event stream, if they + * are invoked asynchronously, once in a long time period, rather than + * synchronously after every Context.commit(). */ public interface Consumer { /** - * Initialize - allocate any resources required to operate. This may include + * Allocate any resources required to operate. This may include * initializing any pooled JMS resources. Called ONCE when created by the * dispatcher pool. This should be used to set up expensive resources that * will remain for the lifetime of the consumer. @@ -31,12 +29,17 @@ public interface Consumer { public void initialize() throws Exception; /** - * Consume an event; events may get filtered at the dispatcher level, hiding - * it from the consumer. This behavior is based on the dispatcher/consumer - * configuration. Should include logic to initialize any resources required - * for a batch of events. + * Consume an event. Events may be filtered by a dispatcher, hiding them + * from the consumer. This behavior is based on the dispatcher/consumer + * configuration. Should include logic to initialize any resources + * required for a batch of events. + * + *

      This method must not commit the context. Committing causes + * re-dispatch of the event queue, which can result in infinite recursion + * leading to memory exhaustion as seen in + * {@link https://github.com/DSpace/DSpace/pull/8756}. * - * @param ctx the execution context object + * @param ctx the current DSpace session * @param event the content event * @throws Exception if error */ diff --git a/dspace-api/src/main/java/org/dspace/event/package-info.java b/dspace-api/src/main/java/org/dspace/event/package-info.java new file mode 100644 index 000000000000..544dfb271a1d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/event/package-info.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +/** + * Actions which alter DSpace model objects can queue {@link Event}s, which + * are presented to {@link Consumer}s by a {@link Dispatcher}. A pool of + * {@code Dispatcher}s is managed by an {@link service.EventService}, guided + * by configuration properties {@code event.dispatcher.*}. + * + *

      One must be careful not to commit the current DSpace {@code Context} + * during event dispatch. {@code commit()} triggers event dispatching, and + * doing this during event dispatch can lead to infinite recursion and + * memory exhaustion. + */ + +package org.dspace.event; diff --git a/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java b/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java index eac9921df6cc..44ad6a70953e 100644 --- a/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java +++ b/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.UUID; import org.dspace.content.dto.MetadataValueDTO; @@ -38,6 +39,8 @@ public class ExternalDataObject { */ private String displayValue; + private List matchUUIDs; + /** * Default constructor */ @@ -143,4 +146,16 @@ public String getValue() { public void setValue(String value) { this.value = value; } + + public List getMatchUUIDs() { + return matchUUIDs; + } + + public void setMatchUUIDs(List matchUUIDs) { + this.matchUUIDs = matchUUIDs; + } + + public boolean isDuplicated() { + return !matchUUIDs.isEmpty(); + } } diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java index 21c14813f93d..9897639f04a6 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java @@ -9,6 +9,7 @@ import java.util.Collection; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -135,9 +136,8 @@ public int getNumberOfResults(String query) { * @return */ private ExternalDataObject getExternalDataObject(ImportRecord record) { - //return 400 if no 
record were found - if (record == null) { - throw new IllegalArgumentException("No record found for query or id"); + if (Objects.isNull(record)) { + return null; } ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier); String id = getFirstValue(record, recordIdMetadata); diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java index 7a836113936c..a07cf89c503e 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java @@ -16,6 +16,7 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -27,6 +28,7 @@ import org.apache.http.impl.client.HttpClientBuilder; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.content.MetadataFieldName; import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.OrcidRestConnector; import org.dspace.external.model.ExternalDataObject; @@ -35,6 +37,7 @@ import org.json.JSONObject; import org.orcid.jaxb.model.v3.release.common.OrcidIdentifier; import org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.Record; import org.orcid.jaxb.model.v3.release.search.Result; import org.springframework.beans.factory.annotation.Autowired; @@ -60,6 +63,8 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider { private XMLtoBio converter; + private Map externalIdentifiers; + public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})"; private static final int MAX_INDEX = 10000; @@ -113,12 +118,13 @@ public void init() throws IOException { @Override public 
Optional getExternalDataObject(String id) { - Person person = getBio(id); - ExternalDataObject externalDataObject = convertToExternalDataObject(person); + Record record = getBio(id); + ExternalDataObject externalDataObject = convertToExternalDataObject(record); return Optional.of(externalDataObject); } - protected ExternalDataObject convertToExternalDataObject(Person person) { + protected ExternalDataObject convertToExternalDataObject(Record record) { + Person person = record.getPerson(); ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier); if (person.getName() != null) { String lastName = ""; @@ -141,6 +147,12 @@ protected ExternalDataObject convertToExternalDataObject(Person person) { externalDataObject .addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null, orcidUrl + '/' + person.getName().getPath())); + + appendOtherNames(externalDataObject, person); + appendResearcherUrls(externalDataObject, person); + appendExternalIdentifiers(externalDataObject, person); + appendAffiliations(externalDataObject, record); + if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) { externalDataObject.setDisplayValue(lastName + ", " + firstName); externalDataObject.setValue(lastName + ", " + firstName); @@ -157,24 +169,64 @@ protected ExternalDataObject convertToExternalDataObject(Person person) { return externalDataObject; } + private void appendOtherNames(ExternalDataObject externalDataObject, Person person) { + person.getOtherNames().getOtherNames().forEach(otherName -> + externalDataObject.addMetadata(new MetadataValueDTO("crisrp", "name", "variant", null, + otherName.getContent()))); + } + + private void appendResearcherUrls(ExternalDataObject externalDataObject, Person person) { + person.getResearcherUrls().getResearcherUrls().forEach(researcherUrl -> + externalDataObject.addMetadata(new MetadataValueDTO("oairecerif", "identifier", "url", null, + researcherUrl.getUrl().getValue()))); + } + + private void 
appendExternalIdentifiers(ExternalDataObject externalDataObject, Person person) { + if (getExternalIdentifiers() != null) { + person.getExternalIdentifiers() + .getExternalIdentifiers() + .forEach(externalIdentifier -> { + String metadataField = externalIdentifiers.get(externalIdentifier.getType()); + if (StringUtils.isNotEmpty(metadataField)) { + MetadataFieldName field = new MetadataFieldName(metadataField); + externalDataObject.addMetadata( + new MetadataValueDTO(field.schema, field.element, field.qualifier, null, + externalIdentifier.getValue())); + } + }); + } + } + + private void appendAffiliations(ExternalDataObject externalDataObject, Record record) { + record.getActivitiesSummary() + .getEmployments() + .getEmploymentGroups() + .stream() + .flatMap(affiliationGroup -> + affiliationGroup.getActivities().stream()) + .forEach(employmentSummary -> + externalDataObject.addMetadata(new MetadataValueDTO("person", "affiliation", "name", + null, employmentSummary.getOrganization().getName()))); + } + /** - * Retrieve a Person object based on a given orcid identifier. + * Retrieve a Record object based on a given orcid identifier. * @param id orcid identifier - * @return Person + * @return Record */ - public Person getBio(String id) { + public Record getBio(String id) { log.debug("getBio called with ID=" + id); if (!isValid(id)) { return null; } - InputStream bioDocument = orcidRestConnector.get(id + ((id.endsWith("/person")) ? 
"" : "/person"), accessToken); - Person person = converter.convertSinglePerson(bioDocument); + InputStream bioDocument = orcidRestConnector.get(id, accessToken); + Record record = converter.convertToRecord(bioDocument); try { bioDocument.close(); } catch (IOException e) { log.error(e.getMessage(), e); } - return person; + return record; } /** @@ -201,13 +253,13 @@ public List searchExternalDataObjects(String query, int star log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken); InputStream bioDocument = orcidRestConnector.get(searchPath, accessToken); List results = converter.convert(bioDocument); - List bios = new LinkedList<>(); + List bios = new LinkedList<>(); for (Result result : results) { OrcidIdentifier orcidIdentifier = result.getOrcidIdentifier(); if (orcidIdentifier != null) { log.debug("Found OrcidId=" + orcidIdentifier.toString()); String orcid = orcidIdentifier.getPath(); - Person bio = getBio(orcid); + Record bio = getBio(orcid); if (bio != null) { bios.add(bio); } @@ -298,4 +350,11 @@ public void setOrcidRestConnector(OrcidRestConnector orcidRestConnector) { this.orcidRestConnector = orcidRestConnector; } + public Map getExternalIdentifiers() { + return externalIdentifiers; + } + + public void setExternalIdentifiers(Map externalIdentifiers) { + this.externalIdentifiers = externalIdentifiers; + } } diff --git a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java index 8f48cda712bc..756b8654f285 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java @@ -12,6 +12,9 @@ import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; 
import org.xml.sax.SAXException; @@ -28,11 +31,16 @@ public abstract class Converter { protected Object unmarshall(InputStream input, Class type) throws SAXException, URISyntaxException { try { + XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory(); + // disallow DTD parsing to ensure no XXE attacks can occur + xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false); + XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(input); + JAXBContext context = JAXBContext.newInstance(type); Unmarshaller unmarshaller = context.createUnmarshaller(); - return unmarshaller.unmarshal(input); - } catch (JAXBException e) { - throw new RuntimeException("Unable to unmarshall orcid message" + e); + return unmarshaller.unmarshal(xmlStreamReader); + } catch (JAXBException | XMLStreamException e) { + throw new RuntimeException("Unable to unmarshall orcid message: " + e); } } } diff --git a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java index 25b3cf787feb..ff7cedbb47ab 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java @@ -14,6 +14,7 @@ import org.apache.logging.log4j.Logger; import org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.Record; import org.orcid.jaxb.model.v3.release.search.Result; import org.orcid.jaxb.model.v3.release.search.Search; import org.xml.sax.SAXException; @@ -64,4 +65,15 @@ public Person convertSinglePerson(InputStream xml) { } return null; } + + public Record convertToRecord(InputStream xml) { + Record record = null; + try { + record = (Record) unmarshall(xml, Record.class); + return record; + } catch (SAXException | URISyntaxException e) { + log.error(e); + } + return record; + } } diff --git 
a/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java b/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java index 7804dfa5689f..76e4fff4f527 100644 --- a/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java @@ -7,12 +7,24 @@ */ package org.dspace.external.service.impl; +import static org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl.RESOURCE_FLAG_FIELD; +import static org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl.RESOURCE_IDS_FIELD; +import static org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl.RESOURCE_SIGNATURE_FIELD; + import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import java.util.Optional; +import java.util.UUID; import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.dspace.app.deduplication.service.DedupService; +import org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl; +import org.dspace.app.deduplication.utils.Signature; import org.dspace.app.suggestion.SuggestionProvider; import org.dspace.app.suggestion.SuggestionService; import org.dspace.authorize.AuthorizeException; @@ -22,11 +34,14 @@ import org.dspace.content.dto.MetadataValueDTO; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.discovery.SearchServiceException; import org.dspace.external.model.ExternalDataObject; import org.dspace.external.provider.ExternalDataProvider; import org.dspace.external.service.ExternalDataService; +import org.dspace.utils.DSpace; 
import org.springframework.beans.factory.annotation.Autowired; /** @@ -49,6 +64,9 @@ public class ExternalDataServiceImpl implements ExternalDataService { @Autowired private SuggestionService suggestionService; + @Autowired + private DedupService dedupService; + @Override public Optional getExternalDataObject(String source, String id) { ExternalDataProvider provider = getExternalDataProvider(source); @@ -64,9 +82,53 @@ public List searchExternalDataObjects(String source, String if (provider == null) { throw new IllegalArgumentException("Provider for: " + source + " couldn't be found"); } - return provider.searchExternalDataObjects(query, start, limit); + + List externalDataObjects = provider.searchExternalDataObjects(query, start, limit); + appendMatchedUUIDs(externalDataObjects); + + return externalDataObjects; + } + + private void appendMatchedUUIDs(List externalDataObjects) { + for (ExternalDataObject externalDataObject : externalDataObjects) { + List uuids = new ArrayList<>(); + try { + QueryResponse response = dedupService.find("*:*", buildFilters(externalDataObject)); + for (SolrDocument resultDoc : response.getResults()) { + uuids.addAll(resultDoc.getFieldValues(RESOURCE_IDS_FIELD) + .stream() + .map(id -> + UUID.fromString(String.valueOf(id))) + .collect(Collectors.toList())); + } + externalDataObject.setMatchUUIDs(uuids); + } catch (SearchServiceException e) { + throw new RuntimeException(e); + } + } + } + + private String[] buildFilters(ExternalDataObject object) { + List filters = new ArrayList<>(); + List allSignatures = getAllSignatures(object); + + if (!allSignatures.isEmpty()) { + filters.add(RESOURCE_FLAG_FIELD + ":" + SolrDedupServiceImpl.DeduplicationFlag.FAKE.getDescription()); + filters.add(RESOURCE_SIGNATURE_FIELD + ":(" + + StringUtils.joinWith(" OR ", allSignatures.stream().toArray(String[]::new)) + ")"); + } + + return filters.toArray(new String[filters.size()]); } + private List getAllSignatures(ExternalDataObject iu) { + List signAlgo = 
new DSpace().getServiceManager().getServicesByType(Signature.class); + return signAlgo.stream() + .filter(algo -> Constants.ITEM == algo.getResourceTypeID()) + .flatMap(algo -> algo.getSignature(iu).stream()) + .filter(signature -> StringUtils.isNotEmpty(signature)) + .collect(Collectors.toList()); + } @Override public List getExternalDataProviders() { diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java index c169e4712f7f..c1c59acf4a63 100644 --- a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java @@ -22,6 +22,8 @@ import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Constants; import org.dspace.core.Context; @@ -77,7 +79,7 @@ public void receiveEvent(Event event) { UsageEvent usageEvent = (UsageEvent) event; LOGGER.debug("Usage event received " + event.getName()); - if (isNotBitstreamViewEvent(usageEvent)) { + if (!isContentBitstream(usageEvent)) { return; } @@ -171,9 +173,33 @@ private String getDocumentPath(HttpServletRequest request) { return documentPath; } - private boolean isNotBitstreamViewEvent(UsageEvent usageEvent) { - return usageEvent.getAction() != UsageEvent.Action.VIEW - || usageEvent.getObject().getType() != Constants.BITSTREAM; + /** + * Verifies if the usage event is a content bitstream view event, by checking if:

        + *
      • the usage event is a view event
      • + *
      • the object of the usage event is a bitstream
      • + *
      • the bitstream belongs to one of the configured bundles (fallback: ORIGINAL bundle)
      + */ + private boolean isContentBitstream(UsageEvent usageEvent) { + // check if event is a VIEW event and object is a Bitstream + if (usageEvent.getAction() == UsageEvent.Action.VIEW + && usageEvent.getObject().getType() == Constants.BITSTREAM) { + // check if bitstream belongs to a configured bundle + List allowedBundles = List.of(configurationService + .getArrayProperty("google-analytics.bundles", new String[]{Constants.CONTENT_BUNDLE_NAME})); + if (allowedBundles.contains("none")) { + // GA events for bitstream views were turned off in config + return false; + } + List bitstreamBundles; + try { + bitstreamBundles = ((Bitstream) usageEvent.getObject()) + .getBundles().stream().map(Bundle::getName).collect(Collectors.toList()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + return allowedBundles.stream().anyMatch(bitstreamBundles::contains); + } + return false; } private boolean isGoogleAnalyticsKeyNotConfigured() { diff --git a/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java b/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java index febf8d42d47d..71bb798ae387 100644 --- a/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java @@ -90,12 +90,11 @@ public List findByPrefix(Context context, String prefix) throws SQLExcep @Override public long countHandlesByPrefix(Context context, String prefix) throws SQLException { - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Handle.class); Root handleRoot = criteriaQuery.from(Handle.class); - criteriaQuery.select(criteriaBuilder.count(handleRoot)); + criteriaQuery.select(handleRoot); criteriaQuery.where(criteriaBuilder.like(handleRoot.get(Handle_.handle), prefix + "%")); return countLong(context, 
criteriaQuery, criteriaBuilder, handleRoot); } diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java index b70eda960d35..4550a84b1c0a 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java @@ -13,6 +13,7 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; +import javax.annotation.PostConstruct; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; @@ -67,13 +68,14 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider { static final String CFG_PREFIX = "identifier.doi.prefix"; static final String CFG_NAMESPACE_SEPARATOR = "identifier.doi.namespaceseparator"; + private static final String DOI_METADATA = "identifier.doi.metadata"; static final char SLASH = '/'; // Metadata field name elements // TODO: move these to MetadataSchema or some such? 
- public static final String MD_SCHEMA = "dc"; - public static final String DOI_ELEMENT = "identifier"; - public static final String DOI_QUALIFIER = "uri"; + public String MD_SCHEMA = "dc"; + public String DOI_ELEMENT = "identifier"; + public String DOI_QUALIFIER = "doi"; // The DOI is queued for registered with the service provider public static final Integer TO_BE_REGISTERED = 1; // The DOI is queued for reservation with the service provider @@ -170,6 +172,17 @@ protected String getNamespaceSeparator() { return this.NAMESPACE_SEPARATOR; } + @PostConstruct + protected void setDoiMetadata() { + String doiMetadata = this.configurationService.getProperty(DOI_METADATA); + if (doiMetadata != null) { + String[] parts = doiMetadata.split("\\."); + this.MD_SCHEMA = parts[0]; + this.DOI_ELEMENT = parts[1]; + this.DOI_QUALIFIER = parts[2]; + } + } + /** * Set the DOI connector, which is the component that commuincates with the remote registration service * (eg. DataCite, EZID, Crossref) diff --git a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java index 3e7f9666c81d..0d764e9575a1 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java @@ -68,10 +68,9 @@ public String register(Context context, DSpaceObject dso) { try { String id = mint(context, dso); - // move canonical to point the latest version + // Populate metadata if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) { - Item item = (Item) dso; - populateHandleMetadata(context, item, id); + populateHandleMetadata(context, dso, id); } return id; @@ -88,8 +87,7 @@ public void register(Context context, DSpaceObject dso, String identifier) { try { handleService.createHandle(context, dso, identifier); if (dso instanceof Item || dso instanceof Collection || dso instanceof 
Community) { - Item item = (Item) dso; - populateHandleMetadata(context, item, identifier); + populateHandleMetadata(context, dso, identifier); } } catch (IOException | IllegalStateException | SQLException | AuthorizeException e) { log.error(LogHelper.getHeader(context, diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index e7c786d5f8ce..e5a90907c7b6 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -27,13 +27,14 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * @author Marsa Haoua * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ -public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { +public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implements InitializingBean { /** * log4j category */ @@ -49,6 +50,19 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { @Autowired(required = true) protected VersionHistoryService versionHistoryService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedDOIIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public String mint(Context context, DSpaceObject dso) throws IdentifierException { 
return mint(context, dso, this.filter); @@ -66,7 +80,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) try { history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { - throw new RuntimeException("A problem occured while accessing the database.", ex); + throw new RuntimeException("A problem occurred while accessing the database.", ex); } String doi = null; @@ -76,7 +90,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) return doi; } } catch (SQLException ex) { - log.error("Error while attemping to retrieve information about a DOI for " + log.error("Error while attempting to retrieve information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso.getID() + ".", ex); throw new RuntimeException("Error while attempting to retrieve " @@ -134,7 +148,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) if (history != null) { // versioning is currently supported for items only // if we have a history, we have a item - doi = makeIdentifierBasedOnHistory(context, dso, history); + doi = makeIdentifierBasedOnHistory(context, dso, history, filter); } else { doi = loadOrCreateDOI(context, dso, null, filter).getDoi(); } @@ -145,7 +159,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } - return doi; + return doi.startsWith(DOI.SCHEME) ? 
doi : DOI.SCHEME + doi; } @Override @@ -153,6 +167,21 @@ public void register(Context context, DSpaceObject dso, String identifier) throw register(context, dso, identifier, this.filter); } + @Override + public String register(Context context, DSpaceObject dso, Filter filter) + throws IdentifierException { + if (!(dso instanceof Item)) { + // DOIs are currently assigned only to Items + return null; + } + + String doi = mint(context, dso, filter); + + register(context, dso, doi, filter); + + return doi; + } + @Override public void register(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException { @@ -162,7 +191,7 @@ public void register(Context context, DSpaceObject dso, String identifier, Filte Item item = (Item) dso; if (StringUtils.isEmpty(identifier)) { - identifier = mint(context, dso); + identifier = mint(context, dso, filter); } String doiIdentifier = doiService.formatIdentifier(identifier); @@ -170,10 +199,10 @@ public void register(Context context, DSpaceObject dso, String identifier, Filte // search DOI in our db try { - doi = loadOrCreateDOI(context, dso, doiIdentifier); + doi = loadOrCreateDOI(context, dso, doiIdentifier, filter); } catch (SQLException ex) { - log.error("Error in databse connection: " + ex.getMessage(), ex); - throw new RuntimeException("Error in database conncetion.", ex); + log.error("Error in database connection: " + ex.getMessage(), ex); + throw new RuntimeException("Error in database connection.", ex); } if (DELETED.equals(doi.getStatus()) || diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java index 6a7e347bd13f..94f24baee941 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java @@ -35,6 +35,7 @@ import 
org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -45,7 +46,7 @@ * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ @Component -public class VersionedHandleIdentifierProvider extends IdentifierProvider { +public class VersionedHandleIdentifierProvider extends IdentifierProvider implements InitializingBean { /** * log4j category */ @@ -71,6 +72,19 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider { @Autowired(required = true) protected ContentServiceFactory contentServiceFactory; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 7705fd2b5762..d73dfa448db9 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -9,6 +9,7 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Date; import 
java.util.List; @@ -20,6 +21,7 @@ import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataValue; import org.dspace.content.service.ItemService; +import org.dspace.content.service.MetadataValueService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; @@ -30,6 +32,7 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,7 +42,8 @@ * @author Ben Bosman (ben at atmire dot com) */ @Component -public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider { +public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider + implements InitializingBean { /** * log4j category */ @@ -65,6 +69,22 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident @Autowired(required = true) private ItemService itemService; + @Autowired() + private MetadataValueService metadataValueService; + + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); @@ -80,11 +100,11 @@ public String register(Context context, DSpaceObject dso) { String id = mint(context, dso); // move canonical to point the latest version - if (dso != null && dso.getType() 
== Constants.ITEM) { + if (dso.getType() == Constants.ITEM && dso instanceof Item) { Item item = (Item) dso; - VersionHistory history = null; + VersionHistory history; try { - history = versionHistoryService.findByItem(context, (Item) dso); + history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { throw new RuntimeException("A problem with the database connection occured.", ex); } @@ -165,45 +185,46 @@ public String register(Context context, DSpaceObject dso) { @Override public void register(Context context, DSpaceObject dso, String identifier) { try { - - Item item = (Item) dso; - - // if for this identifier is already present a record in the Handle table and the corresponding item - // has an history someone is trying to restore the latest version for the item. When - // trying to restore the latest version the identifier in input doesn't have the for 1234/123.latestVersion - // it is the canonical 1234/123 - VersionHistory itemHistory = getHistory(context, identifier); - if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { - - int newVersionNumber = versionHistoryService.getLatestVersion(context, itemHistory) - .getVersionNumber() + 1; - String canonical = identifier; - identifier = identifier.concat(".").concat("" + newVersionNumber); - restoreItAsVersion(context, dso, identifier, item, canonical, itemHistory); - } else if (identifier.matches(".*/.*\\.\\d+")) { - // if identifier == 1234.5/100.4 reinstate the version 4 in the version table if absent - - // if it is a version of an item is needed to put back the record - // in the versionitem table - String canonical = getCanonical(identifier); - DSpaceObject canonicalItem = this.resolve(context, canonical); - if (canonicalItem == null) { - restoreItAsCanonical(context, dso, identifier, item, canonical); - } else { - VersionHistory history = versionHistoryService.findByItem(context, (Item) canonicalItem); - if (history == null) { + if (dso instanceof Item) { + Item 
item = (Item) dso; + // if this identifier is already present in the Handle table and the corresponding item + // has a history, then someone is trying to restore the latest version for the item. When + // trying to restore the latest version, the identifier in input doesn't have the + // 1234/123.latestVersion. Instead, it is the canonical 1234/123 + VersionHistory itemHistory = getHistory(context, identifier); + if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { + + int newVersionNumber = versionHistoryService.getLatestVersion(context, itemHistory) + .getVersionNumber() + 1; + String canonical = identifier; + identifier = identifier.concat(".").concat("" + newVersionNumber); + restoreItAsVersion(context, dso, identifier, item, canonical, itemHistory); + } else if (identifier.matches(".*/.*\\.\\d+")) { + // if identifier == 1234.5/100.4 reinstate the version 4 in the version table if absent + + // if it is a version of an item is needed to put back the record + // in the versionitem table + String canonical = getCanonical(identifier); + DSpaceObject canonicalItem = this.resolve(context, canonical); + if (canonicalItem == null) { restoreItAsCanonical(context, dso, identifier, item, canonical); } else { - restoreItAsVersion(context, dso, identifier, item, canonical, history); + VersionHistory history = versionHistoryService.findByItem(context, (Item) canonicalItem); + if (history == null) { + restoreItAsCanonical(context, dso, identifier, item, canonical); + } else { + restoreItAsVersion(context, dso, identifier, item, canonical, history); + } } + } else { + // A regular handle to create for an Item + createNewIdentifier(context, dso, identifier); + modifyHandleMetadata(context, item, getCanonical(identifier)); } } else { - //A regular handle + // Handle being registered for a different type of object (e.g. 
Collection or Community) createNewIdentifier(context, dso, identifier); - if (dso instanceof Item) { - modifyHandleMetadata(context, item, getCanonical(identifier)); - } } } catch (IOException | SQLException | AuthorizeException e) { log.error(LogHelper.getHeader(context, @@ -306,6 +327,7 @@ public String mint(Context context, DSpaceObject dso) { public DSpaceObject resolve(Context context, String identifier, String... attributes) { // We can do nothing with this, return null try { + identifier = handleService.parseHandle(identifier); return handleService.resolveToObject(context, identifier); } catch (IllegalStateException | SQLException e) { log.error(LogHelper.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), @@ -426,6 +448,19 @@ protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, } } + DSpaceObject itemWithCanonicalHandle = handleService.resolveToObject(context, canonical); + if (itemWithCanonicalHandle != null) { + if (itemWithCanonicalHandle.getID() != previous.getItem().getID()) { + log.warn("The previous version's item (" + previous.getItem().getID() + + ") does not match with the item containing handle " + canonical + + " (" + itemWithCanonicalHandle.getID() + ")"); + } + // Move the original handle from whatever item it's on to the newest version + handleService.modifyHandleDSpaceObject(context, canonical, dso); + } else { + handleService.createHandle(context, dso, canonical); + } + // add a new Identifier for this item: 12345/100.x String idNew = canonical + DOT + version.getVersionNumber(); //Make sure we don't have an old handle hanging around (if our previous version was deleted in the workspace) @@ -474,12 +509,17 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) String handleref = handleService.getCanonicalForm(handle); List identifiers = itemService .getMetadata(item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY); - 
itemService.clearMetadata(context, item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY); + List toRemove = new ArrayList<>(); for (MetadataValue identifier : identifiers) { if (this.supports(identifier.getValue())) { // ignore handles continue; } + + identifiers.remove(identifier); + toRemove.add(identifier); + metadataValueService.delete(context, identifier); + itemService.addMetadata(context, item, identifier.getMetadataField(), @@ -488,10 +528,15 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) identifier.getAuthority(), identifier.getConfidence()); } + itemService.removeMetadataValues(context, item, toRemove); + + item = context.reloadEntity(item); + if (!StringUtils.isEmpty(handleref)) { itemService.addMetadata(context, item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", null, handleref); } + itemService.setMetadataModified(item); itemService.update(context, item); } } diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java index 1961ce82744c..33ef058e1696 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java @@ -141,7 +141,6 @@ public void consume(Context ctx, Event event) throws Exception { + item.getID() + " and DOI " + doi + ".", ex); } } - ctx.commit(); } } diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java index 57136d6143bb..43882918cd4a 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java @@ -8,13 +8,14 @@ package org.dspace.identifier.doi; import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.net.URISyntaxException; 
import java.sql.SQLException; -import java.util.HashMap; import java.util.Iterator; import java.util.Map; +import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; @@ -35,13 +36,14 @@ import org.apache.http.util.EntityUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; import org.dspace.content.crosswalk.CrosswalkException; -import org.dspace.content.crosswalk.DisseminationCrosswalk; -import org.dspace.content.crosswalk.ParameterizedDisseminationCrosswalk; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.identifier.DOI; import org.dspace.services.ConfigurationService; @@ -99,18 +101,6 @@ public class DataCiteConnector * dependency injection. */ protected String METADATA_PATH; - /** - * Name of crosswalk to convert metadata into DataCite Metadata Scheme. Set - * by spring dependency injection. - */ - protected String CROSSWALK_NAME; - /** - * DisseminationCrosswalk to map local metadata into DataCite metadata. - * The name of the crosswalk is set by spring dependency injection using - * {@link #setDisseminationCrosswalkName(String) setDisseminationCrosswalkName} which - * instantiates the crosswalk. 
- */ - protected ParameterizedDisseminationCrosswalk xwalk; protected ConfigurationService configurationService; @@ -119,8 +109,12 @@ public class DataCiteConnector @Autowired protected HandleService handleService; + @Autowired + private ItemService itemService; + + private Map disseminationCrosswalkByEntityType; + public DataCiteConnector() { - this.xwalk = null; this.USERNAME = null; this.PASSWORD = null; } @@ -189,34 +183,6 @@ public void setConfigurationService(ConfigurationService configurationService) { this.configurationService = configurationService; } - /** - * Set the name of the dissemination crosswalk used to convert the metadata - * into DataCite Metadata Schema. Used by spring dependency injection. - * - * @param CROSSWALK_NAME The name of the dissemination crosswalk to use. This - * crosswalk must be configured in dspace.cfg. - */ - @Autowired(required = true) - public void setDisseminationCrosswalkName(String CROSSWALK_NAME) { - this.CROSSWALK_NAME = CROSSWALK_NAME; - } - - protected void prepareXwalk() { - if (null != this.xwalk) { - return; - } - - this.xwalk = (ParameterizedDisseminationCrosswalk) CoreServiceFactory.getInstance().getPluginService() - .getNamedPlugin( - DisseminationCrosswalk.class, - this.CROSSWALK_NAME); - - if (this.xwalk == null) { - throw new RuntimeException("Can't find crosswalk '" - + CROSSWALK_NAME + "'!"); - } - } - protected String getUsername() { if (null == this.USERNAME) { this.USERNAME = this.configurationService.getProperty(CFG_USER); @@ -350,64 +316,43 @@ public void deleteDOI(Context context, String doi) @Override public void reserveDOI(Context context, DSpaceObject dso, String doi) throws DOIIdentifierException { - this.prepareXwalk(); DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() .getDSpaceObjectService(dso); - if (!this.xwalk.canDisseminate(dso)) { - log.error("Crosswalk " + this.CROSSWALK_NAME - + " cannot disseminate DSO with type " + dso.getType() - + " and ID " + dso.getID() 
+ ". Giving up reserving the DOI " - + doi + "."); - throw new DOIIdentifierException("Cannot disseminate " - + dSpaceObjectService.getTypeText(dso) + "/" + dso.getID() - + " using crosswalk " + this.CROSSWALK_NAME + ".", - DOIIdentifierException.CONVERSION_ERROR); - } + StreamDisseminationCrosswalk xwalk = getStreamDisseminationCrosswalkByDso(dso); - // Set the transform's parameters. - // XXX Should the actual list be configurable? - Map parameters = new HashMap<>(); - if (configurationService.hasProperty(CFG_PREFIX)) { - parameters.put("prefix", - configurationService.getProperty(CFG_PREFIX)); - } - if (configurationService.hasProperty(CFG_PUBLISHER)) { - parameters.put("publisher", - configurationService.getProperty(CFG_PUBLISHER)); - } - if (configurationService.hasProperty(CFG_DATAMANAGER)) { - parameters.put("datamanager", - configurationService.getProperty(CFG_DATAMANAGER)); - } - if (configurationService.hasProperty(CFG_HOSTINGINSTITUTION)) { - parameters.put("hostinginstitution", - configurationService.getProperty(CFG_HOSTINGINSTITUTION)); + if (xwalk == null) { + log.error("No crosswalk found for DSO with type " + dso.getType() + + " and ID " + dso.getID() + ". Giving up reserving the DOI " + + doi + "."); + throw new DOIIdentifierException("Cannot disseminate " + + dSpaceObjectService.getTypeText(dso) + "/" + dso.getID() + ".", + DOIIdentifierException.CONVERSION_ERROR); } Element root = null; try { - root = xwalk.disseminateElement(context, dso, parameters); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + xwalk.disseminate(context, dso, baos); + SAXBuilder builder = new SAXBuilder(); + Document document = builder.build(new ByteArrayInputStream(baos.toByteArray())); + root = document.getRootElement(); } catch (AuthorizeException ae) { log.error("Caught an AuthorizeException while disseminating DSO " + "with type " + dso.getType() + " and ID " + dso.getID() + ". 
Giving up to reserve DOI " + doi + ".", ae); throw new DOIIdentifierException("AuthorizeException occured while " - + "converting " + dSpaceObjectService.getTypeText(dso) + "/" + dso - .getID() - + " using crosswalk " + this.CROSSWALK_NAME + ".", ae, + + "converting " + dSpaceObjectService.getTypeText(dso) + "/" + dso + ".", ae, DOIIdentifierException.CONVERSION_ERROR); } catch (CrosswalkException ce) { log.error("Caught an CrosswalkException while reserving a DOI (" + doi + ") for DSO with type " + dso.getType() + " and ID " + dso.getID() + ". Won't reserve the doi.", ce); throw new DOIIdentifierException("CrosswalkException occured while " - + "converting " + dSpaceObjectService.getTypeText(dso) + "/" + dso - .getID() - + " using crosswalk " + this.CROSSWALK_NAME + ".", ce, + + "converting " + dSpaceObjectService.getTypeText(dso) + "/" + dso + ".", ce, DOIIdentifierException.CONVERSION_ERROR); - } catch (IOException | SQLException ex) { + } catch (IOException | SQLException | JDOMException ex) { throw new RuntimeException(ex); } @@ -462,6 +407,21 @@ public void reserveDOI(Context context, DSpaceObject dso, String doi) } } + private StreamDisseminationCrosswalk getStreamDisseminationCrosswalkByDso(DSpaceObject dso) { + + if (dso.getType() != Constants.ITEM) { + return null; + } + + String entityType = itemService.getEntityType((Item) dso); + if (StringUtils.isBlank(entityType)) { + entityType = "Publication"; + } + + return disseminationCrosswalkByEntityType.get(entityType); + + } + @Override public void registerDOI(Context context, DSpaceObject dso, String doi) throws DOIIdentifierException { @@ -631,7 +591,7 @@ protected DataCiteResponse sendMetadataPostRequest(String doi, Element metadataR Format format = Format.getCompactFormat(); format.setEncoding("UTF-8"); XMLOutputter xout = new XMLOutputter(format); - return sendMetadataPostRequest(doi, xout.outputString(new Document(metadataRoot))); + return sendMetadataPostRequest(doi, 
xout.outputString(metadataRoot.getDocument())); } protected DataCiteResponse sendMetadataPostRequest(String doi, String metadata) @@ -842,12 +802,21 @@ protected Element addDOI(String doi, Element root) { } Element identifier = new Element("identifier", configurationService.getProperty(CFG_NAMESPACE, - "http://datacite.org/schema/kernel-3")); + "http://datacite.org/schema/kernel-4")); identifier.setAttribute("identifierType", "DOI"); identifier.addContent(doi.substring(DOI.SCHEME.length())); return root.addContent(0, identifier); } + public Map getDisseminationCrosswalkByEntityType() { + return disseminationCrosswalkByEntityType; + } + + public void setDisseminationCrosswalkByEntityType( + Map disseminationCrosswalkByEntityType) { + this.disseminationCrosswalkByEntityType = disseminationCrosswalkByEntityType; + } + protected class DataCiteResponse { private final int statusCode; private final String content; diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java index 8fbe4ef2cf57..da59472c45a6 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java @@ -273,6 +273,9 @@ public Integer count(String query, String token) { uriBuilder.addParameter("fl", this.resultFieldList); String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return 0; + } JsonNode jsonNode = convertStringJsonToJsonNode(resp); return jsonNode.at("/response/numFound").asInt(); } catch (URISyntaxException e) { @@ -296,6 +299,9 @@ public List search(String query, Integer start, Integer count, Str uriBuilder.addParameter("fl", this.resultFieldList); String resp = liveImportClient.executeHttpGetRequest(timeout, 
uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return adsResults; + } JsonNode jsonNode = convertStringJsonToJsonNode(resp); JsonNode docs = jsonNode.at("/response/docs"); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java index 0014088c8650..4b6a5aa92e72 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java @@ -117,4 +117,9 @@ public void setMetadataFieldMap(@SuppressWarnings("rawtypes") Map metadataFieldM super.setMetadataFieldMap(metadataFieldMap); } + @Override + public boolean canImportMultipleRecords() { + return true; + } + } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java index 5eff46c790e4..4c5d8ab8255b 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java @@ -289,6 +289,9 @@ protected List search(String id, String appId) URIBuilder uriBuilder = new URIBuilder(this.url + id + ".rdf?appid=" + appId); Map> params = new HashMap>(); String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isBlank(response)) { + return records; + } List elements = splitToRecords(response); for (Element record : elements) { records.add(transformSourceRecords(record)); @@ -303,6 +306,8 @@ protected List search(String id, String appId) private List splitToRecords(String recordsSrc) { try { SAXBuilder 
saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); return root.getChildren(); @@ -356,6 +361,8 @@ private List getCiniiIds(String appId, Integer maxResult, String author, String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); int url_len = this.url.length() - 1; SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); List namespaces = Arrays.asList( @@ -416,8 +423,13 @@ private Integer countCiniiElement(String appId, Integer maxResult, String author Map> params = new HashMap>(); String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isEmpty(response)) { + return 0; + } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); List namespaces = Arrays diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java new file mode 100644 index 000000000000..dec0b050f396 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at 
the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; +import org.joda.time.LocalDate; + +/** + * This class is used for CrossRef's Live-Import to extract + * issued attribute. + * Beans are configured in the crossref-integration.xml file. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator dates = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (dates.hasNext()) { + JsonNode date = dates.next(); + LocalDate issuedDate = null; + SimpleDateFormat issuedDateFormat = null; + if (date.has(0) && date.has(1) && date.has(2)) { + issuedDate = new LocalDate( + date.get(0).numberValue().intValue(), + date.get(1).numberValue().intValue(), + date.get(2).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + } else if (date.has(0) && date.has(1)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()) + .withMonthOfYear(date.get(1).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM"); + } else if (date.has(0)) { + issuedDate = new 
LocalDate().withYear(date.get(0).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy"); + } + values.add(issuedDateFormat.format(issuedDate.toDate())); + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java index 7dde330b27ec..5c4c49deaec9 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java @@ -158,6 +158,9 @@ public List call() throws Exception { } Map> params = new HashMap>(); String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isEmpty(response)) { + return results; + } JsonNode jsonNode = convertStringJsonToJsonNode(response); Iterator nodes = jsonNode.at("/message/items").iterator(); while (nodes.hasNext()) { @@ -194,6 +197,9 @@ public List call() throws Exception { URIBuilder uriBuilder = new URIBuilder(url + "/" + ID); Map> params = new HashMap>(); String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isEmpty(responseString)) { + return results; + } JsonNode jsonNode = convertStringJsonToJsonNode(responseString); JsonNode messageNode = jsonNode.at("/message"); results.add(transformSourceRecords(messageNode.toString())); @@ -246,6 
+252,9 @@ public List call() throws Exception { } Map> params = new HashMap>(); String resp = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return results; + } JsonNode jsonNode = convertStringJsonToJsonNode(resp); Iterator nodes = jsonNode.at("/message/items").iterator(); while (nodes.hasNext()) { @@ -284,6 +293,9 @@ public Integer call() throws Exception { uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); Map> params = new HashMap>(); String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isEmpty(responseString)) { + return 0; + } JsonNode jsonNode = convertStringJsonToJsonNode(responseString); return jsonNode.at("/message/total-results").asInt(); } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java index e2cb24f4b578..b686169f9a21 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java @@ -11,7 +11,9 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Optional; +import org.dspace.content.MetadataFieldName; import org.dspace.importer.external.metadatamapping.MetadatumDTO; /** @@ -94,6 +96,17 @@ public Collection getValue(String schema, String element, String q return values; } + public Optional getSingleValue(String field) { + MetadataFieldName metadataFieldName = new MetadataFieldName(field); + return getSingleValue(metadataFieldName.schema, metadataFieldName.element, metadataFieldName.qualifier); + } + + public Optional getSingleValue(String schema, String element, String qualifier) { + return getValue(schema, element, qualifier).stream() + .map(MetadatumDTO::getValue) + .findFirst(); + } + /** * 
Add a value to the valueList * diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java index 395d6b48c987..133717b90e79 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java @@ -59,6 +59,7 @@ * Implements a data source for querying EPO * * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4Science.com) */ public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService implements QuerySource { @@ -147,6 +148,9 @@ protected String login() throws IOException, HttpException { Map> params = getLoginParams(); String entity = "grant_type=client_credentials"; String json = liveImportClient.executeHttpPostRequest(this.authUrl, params, entity); + if (StringUtils.isBlank(json)) { + return json; + } ObjectMapper mapper = new ObjectMapper(new JsonFactory()); JsonNode rootNode = mapper.readTree(json); JsonNode accessTokenNode = rootNode.get("access_token"); @@ -190,7 +194,8 @@ public int getRecordsCount(Query query) throws MetadataSourceException { String bearer = login(); return retry(new CountRecordsCallable(query, bearer)); } catch (IOException | HttpException e) { - e.printStackTrace(); + log.warn(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } } return 0; @@ -204,7 +209,7 @@ public Collection getRecords(String query, int start, String bearer = login(); return retry(new SearchByQueryCallable(query, bearer, start, count)); } catch (IOException | HttpException e) { - log.warn(e.getMessage()); + log.warn(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); } } @@ -247,14 +252,12 @@ public ImportRecord 
getRecord(Query query) throws MetadataSourceException { } @Override - public Collection findMatchingRecords(Item item) - throws MetadataSourceException { + public Collection findMatchingRecords(Item item) throws MetadataSourceException { return null; } @Override - public Collection findMatchingRecords(Query query) - throws MetadataSourceException { + public Collection findMatchingRecords(Query query) throws MetadataSourceException { return null; } @@ -303,7 +306,13 @@ private SearchByIdCallable(String id, String bearer) { } public List call() throws Exception { - if (id.contains(APP_NO_DATE_SEPARATOR)) { + int positionToSplit = id.indexOf(":"); + String docType = EpoDocumentId.EPODOC; + String idS = id; + if (positionToSplit != -1) { + docType = id.substring(0, positionToSplit); + idS = id.substring(positionToSplit + 1, id.length()); + } else if (id.contains(APP_NO_DATE_SEPARATOR)) { // special case the id is the combination of the applicationnumber and date filed String query = "applicationnumber=" + id.split(APP_NO_DATE_SEPARATOR_REGEX)[0]; SearchByQueryCallable search = new SearchByQueryCallable(query, bearer, 0, 10); @@ -316,12 +325,7 @@ public List call() throws Exception { return records; } // search by Patent Number - String[] identifier = id.split(":"); - String patentIdentifier = identifier.length == 2 ? 
identifier[1] : id; - List records = retry(new SearchByQueryCallable(patentIdentifier, bearer, null, null)); - if (records.size() > 1) { - log.warn("More record are returned with Patent Number: " + id); - } + List records = searchDocument(bearer, idS, docType); return records; } } @@ -375,7 +379,7 @@ public List call() throws Exception { private Integer countDocument(String bearer, String query) { if (StringUtils.isBlank(bearer)) { - return null; + return 0; } try { Map> params = new HashMap>(); @@ -388,8 +392,13 @@ private Integer countDocument(String bearer, String query) { uriBuilder.addParameter("q", query); String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isBlank(response)) { + return 0; + } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -402,7 +411,7 @@ private Integer countDocument(String bearer, String query) { return Integer.parseInt(totalRes); } catch (JDOMException | IOException | URISyntaxException | JaxenException e) { log.error(e.getMessage(), e); - return null; + return 0; } } @@ -425,8 +434,13 @@ private List searchDocumentIds(String bearer, String query, int s uriBuilder.addParameter("q", query); String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isBlank(response)) { + return results; + } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -465,6 +479,9 @@ private List searchDocument(String bearer, String id, String docTy String url 
= this.url.replace("$(doctype)", docType).replace("$(id)", id); String response = liveImportClient.executeHttpGetRequest(1000, url, params); + if (StringUtils.isBlank(response)) { + return results; + } List elements = splitToRecords(response); for (Element element : elements) { results.add(transformSourceRecords(element)); @@ -478,10 +495,12 @@ private List searchDocument(String bearer, String id, String docTy private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); List namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange")); - XPathExpression xpath = XPathFactory.instance().compile("//ns:exchange-document", + XPathExpression xpath = XPathFactory.instance().compile("//ns:exchange-documents", Filters.element(), null, namespaces); List recordsList = xpath.evaluate(root); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java index 81a6631127ac..1a8a7a7861ed 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java @@ -60,7 +60,8 @@ public String executeHttpGetRequest(int timeout, String URL, Map headerParams = params.get(HEADER_PARAMETERS); @@ -71,7 +72,9 @@ public String executeHttpGetRequest(int timeout, String URL, Map getMetadataOfAuthors(Element element) throws JaxenExc addMetadatum(metadatums, getMetadata(getElementValue(surname) + ", " + getElementValue(givenName), this.authname)); - 
addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId)); - addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid)); - addMetadatum(metadatums, getMetadata(StringUtils.isNotBlank(afid.getValue()) - ? this.affId2affName.get(afid.getValue()) : null, this.affiliation)); + if (this.scopusId != null) { + addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId)); + } + if (this.orcid != null) { + addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid)); + } + if (this.affiliation != null) { + addMetadatum(metadatums, getMetadata(StringUtils.isNotBlank(afid.getValue()) + ? this.affId2affName.get(afid.getValue()) : null, this.affiliation)); + } return metadatums; } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/RorParentOrgUnitMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/RorParentOrgUnitMetadataContributor.java new file mode 100644 index 000000000000..be1910d7a521 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/RorParentOrgUnitMetadataContributor.java @@ -0,0 +1,109 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +public class RorParentOrgUnitMetadataContributor extends SimpleJsonPathMetadataContributor { + + private String typeField; + + private String parentType; + + 
private String labelField; + + /** + * Retrieve the metadata associated with the given object. + * The toString() of the resulting object will be used. + * + * @param fullJson A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. + */ + @Override + public Collection contributeMetadata(String fullJson) { + + Collection metadata = new ArrayList<>(); + Collection metadataValue = new ArrayList<>(); + + JsonNode jsonNode = convertStringJsonToJsonNode(fullJson); + JsonNode array = jsonNode.at(getQuery()); + if (!array.isArray()) { + return metadata; + } + + Iterator nodes = array.iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + + if (!node.has(labelField)) { + continue; + } + + String type = node.has(typeField) ? node.get(typeField).asText() : null; + String label = node.get(labelField).asText(); + + if (parentType.equalsIgnoreCase(type)) { + metadataValue.add(label); + } + + } + + for (String value : metadataValue) { + MetadatumDTO metadatumDto = new MetadatumDTO(); + metadatumDto.setValue(value); + metadatumDto.setElement(getField().getElement()); + metadatumDto.setQualifier(getField().getQualifier()); + metadatumDto.setSchema(getField().getSchema()); + metadata.add(metadatumDto); + } + return metadata; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + return body; + } + + public String getTypeField() { + return typeField; + } + + public void setTypeField(String typeField) { + this.typeField = typeField; + } + + public String getLabelField() { + return labelField; + } + + public void setLabelField(String labelField) { + this.labelField = labelField; + } + + public String getParentType() { + return parentType; + } + + public void setParentType(String 
parentType) { + this.parentType = parentType; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeAndSubNodeContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeAndSubNodeContributor.java new file mode 100644 index 000000000000..aae07b1ff263 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeAndSubNodeContributor.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * Metadata contributor that takes multiple values of the same node. + * Can also filter nodes by attribute element value. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SimpleXpathMetadatumAndAttributeAndSubNodeContributor extends SimpleXpathMetadatumAndAttributeContributor { + + private String attributeValue; + private String queryToSubNode; + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + + List nodes = getNodes(t, query, namespaces); + List subNodes = getSubNodes(namespaces, nodes); + for (Object el : subNodes) { + if (el instanceof Element) { + values.add(metadataFieldMapping.toDCValue(this.field, extractValue(el))); + } + } + return values; + } + + private List getSubNodes(List namespaces, List nodes) { + List allNodes = new ArrayList(); + for (Object el : nodes) { + if (el instanceof Element) { + List elements = ((Element) el).getChildren(); + for (Element element : elements) { + String attributeValue = element.getAttributeValue(this.attribute); + if (StringUtils.equals(attributeValue, this.attributeValue)) { + List subNodes = getNodes(element, queryToSubNode, namespaces); + allNodes.addAll(subNodes); + } + } + } + } + return allNodes; + } + + private List getNodes(Element t, String query, List namespaces) { + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(),null, namespaces); + return xpath.evaluate(t); + } + + private String extractValue(Object el) { + String value = ((Element) el).getText(); + return StringUtils.isNotBlank(value) ? 
value : ((Element) el).getValue().trim(); + } + + public void setAttributeValue(String attributeValue) { + this.attributeValue = attributeValue; + } + + public void setQueryToSubNode(String queryToSubNode) { + this.queryToSubNode = queryToSubNode; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java index dea840d15b38..1fd9d168338d 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java @@ -33,7 +33,7 @@ public class SimpleXpathMetadatumAndAttributeContributor extends SimpleXpathMeta private final static Logger log = LogManager.getLogger(); - private String attribute; + protected String attribute; @Override public Collection contributeMetadata(Element t) { diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index ba2316755300..f1053fe19ccb 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java @@ -14,9 +14,11 @@ import java.util.Date; import java.util.LinkedList; import java.util.List; +import java.util.Locale; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.content.DCDate; +import org.dspace.core.I18nUtil; import 
org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -99,7 +101,7 @@ public PubmedDateMetadatumContributor(MetadataFieldConfig field, MetadataContrib @Override public Collection contributeMetadata(T t) { List values = new LinkedList<>(); - + final Locale defaultLocale = I18nUtil.getDefaultLocale(); try { LinkedList yearList = (LinkedList) year.contributeMetadata(t); @@ -107,26 +109,30 @@ public Collection contributeMetadata(T t) { LinkedList dayList = (LinkedList) day.contributeMetadata(t); for (int i = 0; i < yearList.size(); i++) { - DCDate dcDate = null; + String resultDateString = ""; String dateString = ""; + SimpleDateFormat resultFormatter = null; if (monthList.size() > i && dayList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + dayList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd", defaultLocale); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM", defaultLocale); } else { dateString = yearList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy", defaultLocale); } int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size() && dcDate == null) { + while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { - SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); + SimpleDateFormat formatter = new SimpleDateFormat(dateFormat, defaultLocale); Date date = formatter.parse(dateString); - dcDate = new DCDate(date); + resultDateString = resultFormatter.format(date); } catch (ParseException e) { // Multiple dateformats can be 
configured, we don't want to print the entire stacktrace every // time one of those formats fails. @@ -136,8 +142,8 @@ public Collection contributeMetadata(T t) { } j++; } - if (dcDate != null) { - values.add(metadataFieldMapping.toDCValue(field, dcDate.toString())); + if (StringUtils.isNotBlank(resultDateString)) { + values.add(metadataFieldMapping.toDCValue(field, resultDateString)); } else { log.info( "Failed parsing " + dateString + ", check " + diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java index b30ea22ca4e4..a6cfa625bbcf 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java @@ -292,7 +292,14 @@ public Collection call() throws Exception { int countAttempt = 0; while (StringUtils.isBlank(response) && countAttempt <= attempt) { countAttempt++; + + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response)) { @@ -316,7 +323,13 @@ public Collection call() throws Exception { countAttempt = 0; while (StringUtils.isBlank(response2) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response2)) { @@ -338,6 +351,11 @@ public Collection call() throws Exception { 
private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // Disallow external entities & entity expansion to protect against XXE attacks + // (NOTE: We receive errors if we disable all DTDs for PubMed, so this is the best we can do) + saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false); + saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + saxBuilder.setExpandEntities(false); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); @@ -418,7 +436,13 @@ public Collection call() throws Exception { int countAttempt = 0; while (StringUtils.isBlank(response) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response)) { @@ -441,7 +465,12 @@ public Collection call() throws Exception { countAttempt = 0; while (StringUtils.isBlank(response2) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response2)) { @@ -501,4 +530,4 @@ public void setUrlSearch(String urlSearch) { this.urlSearch = urlSearch; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java index 1ec0da74206e..217fe18f1cc5 100644 --- 
a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java @@ -292,8 +292,13 @@ public Integer count(String query) throws URISyntaxException, ClientProtocolExce try { Map> params = new HashMap>(); String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query), params); + if (StringUtils.isEmpty(response)) { + return 0; + } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); Element element = root.getChild("hitCount"); @@ -365,6 +370,8 @@ public List search(String query, Integer size, Integer start) thro String cursorMark = StringUtils.EMPTY; if (StringUtils.isNotBlank(response)) { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); XPathFactory xpfac = XPathFactory.instance(); XPathExpression xPath = xpfac.compile("//responseWrapper/resultList/result", diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorFieldMapping.java new file mode 100644 index 000000000000..5248d793e292 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorFieldMapping.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.importer.external.ror.service; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the ROR metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class RorFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieved metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "rorMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..650e939e3dd0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java @@ -0,0 +1,285 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ror.service; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import 
java.util.concurrent.Callable; +import java.util.stream.Collectors; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +public class RorImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + + private int timeout = 1000; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "ror"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for ROR"); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for ROR"); + } + + @Override + public void init() throws Exception { + } + + /** + * This class is a Callable implementation to get ROR entries based on query + * object. This Callable uses as query value the string queryString passed to + * the constructor. If the object is constructed through a Query.class instance, a + * Query's map entry with key "query" will be used. Pagination is supported too, + * using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, int start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + return search(query.getParameterAsClass("query", String.class), + query.getParameterAsClass("start", Integer.class)); + } + } + + /** + * This class is a Callable implementation to get a ROR entry using its identifier. The + * identifier to use can be passed through the constructor as a String or as a + * Query's map entry, with the key "id". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + return searchById(query.getParameterAsClass("id", String.class)); + } + } + + /** + * This class is a Callable implementation to count the number of entries for a + * ROR query. This Callable uses as query value to ROR the string queryString + * passed to the constructor. If the object is constructed through a Query.class + * instance, the value of the Query's map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + return count(query.getParameterAsClass("query", String.class)); + } + } + + public Integer count(String query) { + try { + Map> params = new HashMap>(); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("query", query); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return 0; + } + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + return jsonNode.at("/number_of_results").asInt(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return 0; + } + + private List searchById(String id) { + + List adsResults = new ArrayList<>(); + + id = StringUtils.removeStart(id, "https://ror.org/"); + + try { + Map> params = new HashMap>(); + + URIBuilder uriBuilder = new URIBuilder(this.url + "/" + id); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return adsResults; + } + + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + adsResults.add(transformSourceRecords(jsonNode.toString())); + + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return adsResults; + } + + private List search(String query, Integer start) { + List adsResults = new ArrayList<>(); + try { + Map> params = new HashMap>(); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("query", query); + if (start != null) { + uriBuilder.addParameter("page", String.valueOf((start / 20) + 1)); + } + + String resp = 
liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return adsResults; + } + + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + JsonNode docs = jsonNode.at("/items"); + if (docs.isArray()) { + Iterator nodes = docs.elements(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + adsResults.add(transformSourceRecords(node.toString())); + } + } else { + adsResults.add(transformSourceRecords(docs.toString())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); + } + + if (start == null) { + return adsResults; + } + + if (start % 20 == 0) { + return adsResults.stream() + .limit(10) + .collect(Collectors.toList()); + } else { + return adsResults.stream() + .skip(10) + .collect(Collectors.toList()); + } + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java index d0c2fb078a2c..273a3455a78c 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java @@ -200,8 +200,13 @@ public Integer call() throws Exception { Map requestParams = getRequestParameters(query, null, null, null); params.put(URI_PARAMETERS, requestParams); String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + if (StringUtils.isEmpty(response)) { + return 0; + } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure 
no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -243,6 +248,9 @@ public List call() throws Exception { Map requestParams = getRequestParameters(queryString, viewMode, null, null); params.put(URI_PARAMETERS, requestParams); String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + if (StringUtils.isEmpty(response)) { + return results; + } List elements = splitToRecords(response); for (Element record : elements) { results.add(transformSourceRecords(record)); @@ -302,6 +310,9 @@ public List call() throws Exception { Map requestParams = getRequestParameters(queryString, viewMode, start, count); params.put(URI_PARAMETERS, requestParams); String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + if (StringUtils.isEmpty(response)) { + return results; + } List elements = splitToRecords(response); for (Element record : elements) { results.add(transformSourceRecords(record)); @@ -347,6 +358,9 @@ public List call() throws Exception { Map requestParams = getRequestParameters(queryString, viewMode, start, count); params.put(URI_PARAMETERS, requestParams); String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + if (StringUtils.isEmpty(response)) { + return results; + } List elements = splitToRecords(response); for (Element record : elements) { results.add(transformSourceRecords(record)); @@ -377,6 +391,8 @@ private Map getRequestParameters(String query, String viewMode, private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); List records = 
root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom")); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java b/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java index 28df30b345bc..a444a3609c15 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java @@ -311,7 +311,7 @@ public boolean canImportFromFile(String originalName) { /* * Get a collection of record from File, * The first match will be return. - * + * * @param file The file from which will read records * @param originalName The original file name or full path * @return a single record contains the metadatum @@ -319,28 +319,83 @@ public boolean canImportFromFile(String originalName) { */ public ImportRecord getRecord(File file, String originalName) throws FileMultipleOccurencesException, FileSourceException { - ImportRecord importRecords = null; - for (MetadataSource metadataSource : importSources.values()) { - try (InputStream fileInputStream = new FileInputStream(file)) { - if (metadataSource instanceof FileSource) { - FileSource fileSource = (FileSource)metadataSource; - if (fileSource.isValidSourceForFile(originalName)) { - importRecords = fileSource.getRecord(fileInputStream); - break; + try (InputStream fileInputStream = new FileInputStream(file)) { + FileSource fileSource = this.getFileSource(fileInputStream, originalName); + try { + if (fileSource.isValidSourceForFile(originalName)) { + return fileSource.getRecord(fileInputStream); + } + } catch (FileSourceException e) { + log.debug(fileSource.getImportSource() + " isn't a valid parser for file", e); + } + //catch statements is required because we could have supported format (i.e. 
XML) + //which fail on schema validation + } catch (FileMultipleOccurencesException e) { + log.debug("File contains multiple metadata, return with error"); + throw e; + } catch (IOException e1) { + throw new FileSourceException("File cannot be read, may be null"); + } + return null; + } + + /** + * Get a collection of records from a File, + * + * @param file The file from which records will be read + * @param originalName The original file name or full path + * @return records containing metadatum + * @throws FileMultipleOccurencesException if the import configured for the {@code file} + * doesn't allow multiple records import. + * @throws FileSourceException if the file cannot be read. + */ + public List getRecords(File file, String originalName) + throws FileMultipleOccurencesException, FileSourceException { + try (InputStream fileInputStream = new FileInputStream(file)) { + FileSource fileSource = this.getFileSource(fileInputStream, originalName); + try { + if (fileSource.isValidSourceForFile(originalName)) { + List records = fileSource.getRecords(fileInputStream); + if (!fileSource.canImportMultipleRecords() && records.size() > 1) { + throw new FileMultipleOccurencesException( + "Found " + records.size() + " entries in file ( " + + originalName + + " ) but import source ( " + + fileSource.getImportSource() + + " ) not allowed to import multiple records" + ); } + return records; + } + } catch (FileSourceException e) { + log.debug(fileSource.getImportSource() + " isn't a valid parser for file", e); + } //catch statements is required because we could have supported format (i.e. 
XML) //which fail on schema validation - } catch (FileSourceException e) { - log.debug(metadataSource.getImportSource() + " isn't a valid parser for file"); - } catch (FileMultipleOccurencesException e) { - log.debug("File contains multiple metadata, return with error"); - throw e; - } catch (IOException e1) { - throw new FileSourceException("File cannot be read, may be null"); + } catch (IOException e1) { + throw new FileSourceException("File cannot be read, may be null"); + } + return null; + } + + protected FileSource getFileSource(File file, String originalName) throws FileSourceException { + try (InputStream fileInputStream = new FileInputStream(file)) { + return getFileSource(file, originalName); + } catch (IOException e1) { + throw new FileSourceException("File cannot be read, may be null"); + } + } + + protected FileSource getFileSource(InputStream fileInputStream, String originalName) { + for (MetadataSource metadataSource : importSources.values()) { + if (metadataSource instanceof FileSource) { + FileSource fileSource = (FileSource)metadataSource; + if (fileSource.isValidSourceForFile(originalName)) { + return fileSource; + } } } - return importRecords; + return null; } /** diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java index 5d83b9a7cce4..b58f69b6665c 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java @@ -41,7 +41,7 @@ public abstract class AbstractPlainMetadataSource /** * Set the file extensions supported by this metadata service - * + * * @param supportedExtensions the file extensions (xml,txt,...) 
supported by this service */ public void setSupportedExtensions(List supportedExtensions) { @@ -64,6 +64,9 @@ public List getSupportedExtensions() { @Override public List getRecords(InputStream is) throws FileSourceException { List datas = readData(is); + if (datas == null) { + return List.of(); + } List records = new ArrayList<>(); for (PlainMetadataSourceDto item : datas) { records.add(toRecord(item)); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java index 38632a1a2b72..29801433e3b3 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java @@ -183,6 +183,7 @@ protected T retry(Callable callable) throws MetadataSourceException { log.warn("Error in trying operation " + operationId + " " + retry + " " + warning + ", retrying !", e); } finally { + this.lastRequest = System.currentTimeMillis(); lock.unlock(); } @@ -262,5 +263,7 @@ protected void throwSourceExceptionHook() { */ public abstract void init() throws Exception; - + public void setInterRequestTime(final long interRequestTime) { + this.interRequestTime = interRequestTime; + } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java index 801f5474bb4e..fffd476a69ee 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java @@ -52,7 +52,7 @@ public ImportRecord getRecord(InputStream inputStream) /** * This method is used to decide if the FileSource manage the file format - * + * * @param 
originalName the file file original name * @return true if the FileSource can parse the file, false otherwise */ @@ -67,4 +67,13 @@ public default boolean isValidSourceForFile(String originalName) { return false; } + /** + * This method is used to determine if we can import multiple records at once placed in the same source file. + * + * @return true if allowed to import multiple records in the same file, false otherwise + */ + public default boolean canImportMultipleRecords() { + return false; + } + } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java index a4f90fa5ba61..1b942a7f1525 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java @@ -144,6 +144,9 @@ public Integer call() throws Exception { uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); Map> params = new HashMap>(); String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + if (StringUtils.isEmpty(responseString)) { + return 0; + } JsonNode node = convertStringJsonToJsonNode(responseString); JsonNode resultCountNode = node.get("resultCount"); return resultCountNode.intValue(); @@ -182,8 +185,7 @@ public String call() throws Exception { } } Map> params = new HashMap>(); - String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); - return response; + return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); } } @@ -309,6 +311,9 @@ private JsonNode convertStringJsonToJsonNode(String json) { private List extractMetadataFromRecordList(String records) { List recordsResult = new ArrayList<>(); + if (StringUtils.isEmpty(records)) { + 
return recordsResult; + } JsonNode jsonNode = convertStringJsonToJsonNode(records); JsonNode node = jsonNode.get("records"); if (Objects.nonNull(node) && node.isArray()) { diff --git a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java index 2ccdc12b8db2..a2e8e221f894 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java @@ -145,8 +145,13 @@ public Integer call() throws Exception { Map> params = new HashMap>(); params.put(HEADER_PARAMETERS, getRequestParameters()); String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + if (StringUtils.isEmpty(response)) { + return 0; + } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); XPathExpression xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]", @@ -179,6 +184,9 @@ public List call() throws Exception { Map> params = new HashMap>(); params.put(HEADER_PARAMETERS, getRequestParameters()); String response = liveImportClient.executeHttpGetRequest(timeout, urlString, params); + if (StringUtils.isEmpty(response)) { + return results; + } List elements = splitToRecords(response); for (Element record : elements) { @@ -226,6 +234,9 @@ public List call() throws Exception { String url = urlSearch + URLEncoder.encode(queryString, StandardCharsets.UTF_8) + "&count=" + count + "&firstRecord=" + (start + 1); String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + if 
(StringUtils.isEmpty(response)) { + return results; + } List omElements = splitToRecords(response); for (Element el : omElements) { @@ -285,6 +296,8 @@ private boolean isIsi(String query) { private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]", diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java index 47b426b135c1..9fb9a725c5b7 100644 --- a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java @@ -11,6 +11,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import javax.persistence.Cacheable; import javax.persistence.CascadeType; import javax.persistence.Column; @@ -79,13 +80,8 @@ public class CrisLayoutBox implements ReloadableEntity { ) private Set metadataSecurityFields = new HashSet<>(); - @ManyToMany(fetch = FetchType.LAZY) - @JoinTable( - name = "cris_layout_box2securitygroup", - joinColumns = {@JoinColumn(name = "box_id")}, - inverseJoinColumns = {@JoinColumn(name = "group_id")} - ) - private Set groupSecurityFields = new HashSet<>(); + @OneToMany(fetch = FetchType.LAZY, mappedBy = "box", cascade = CascadeType.ALL, orphanRemoval = true) + private Set box2SecurityGroups = new HashSet<>(); @OneToMany(fetch = FetchType.LAZY, mappedBy = "box", cascade = CascadeType.ALL) @OrderBy(value = "row, cell, priority") @@ -288,20 +284,19 @@ public void setContainer(Boolean container) { this.container = container; } - public void setGroupSecurityFields(Set groupSecurityFields) { - 
this.groupSecurityFields = groupSecurityFields; - } - - public void addGroupSecurityFields(Set groupSecurityFields) { - this.groupSecurityFields.addAll(groupSecurityFields); + public Set getGroupSecurityFields() { + return box2SecurityGroups.stream() + .map(crisLayoutBox2SecurityGroup -> + crisLayoutBox2SecurityGroup.getGroup()) + .collect(Collectors.toSet()); } - public void addGroupSecurityFields(Group group) { - this.groupSecurityFields.add(group); + public Set getBox2SecurityGroups() { + return box2SecurityGroups; } - public Set getGroupSecurityFields() { - return groupSecurityFields; + public void setBox2SecurityGroups(Set box2SecurityGroups) { + this.box2SecurityGroups = box2SecurityGroups; } @Override diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox2SecurityGroup.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox2SecurityGroup.java new file mode 100644 index 000000000000..d0ee1cd58415 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox2SecurityGroup.java @@ -0,0 +1,124 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.layout; + +import java.io.Serializable; +import javax.persistence.CascadeType; +import javax.persistence.Embeddable; +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.MapsId; +import javax.persistence.Table; + +import org.dspace.eperson.Group; + +@Entity +@Table(name = "cris_layout_box2securitygroup") +public class CrisLayoutBox2SecurityGroup implements Serializable { + + @Embeddable + public static class CrisLayoutBox2SecurityGroupId implements Serializable { + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "box_id") + private 
CrisLayoutBox boxId; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "group_id") + private Group groupId; + + public CrisLayoutBox2SecurityGroupId() { + + } + + public CrisLayoutBox2SecurityGroupId(CrisLayoutBox boxId, Group groupId) { + this.boxId = boxId; + this.groupId = groupId; + } + + public CrisLayoutBox getBoxId() { + return boxId; + } + + public void setBoxId(CrisLayoutBox boxId) { + this.boxId = boxId; + } + + public Group getGroupId() { + return groupId; + } + + public void setGroupId(Group groupId) { + this.groupId = groupId; + } + } + + @EmbeddedId + private CrisLayoutBox2SecurityGroupId id; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("boxId") + @JoinColumn(name = "box_id", insertable = false, updatable = false) + private CrisLayoutBox box; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("groupId") + @JoinColumn(name = "group_id", insertable = false, updatable = false) + private Group group; + + @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL) + @JoinColumn(name = "alternative_box_id", nullable = true) + private CrisLayoutBox alternativeBox; + + public CrisLayoutBox2SecurityGroup() { + + } + + public CrisLayoutBox2SecurityGroup(CrisLayoutBox2SecurityGroupId id, + CrisLayoutBox box, Group group, + CrisLayoutBox alternativeBox) { + this.id = id; + this.box = box; + this.group = group; + this.alternativeBox = alternativeBox; + } + + public CrisLayoutBox2SecurityGroupId getId() { + return id; + } + + public void setId(CrisLayoutBox2SecurityGroupId id) { + this.id = id; + } + + public CrisLayoutBox getBox() { + return box; + } + + public void setBox(CrisLayoutBox box) { + this.box = box; + } + + public Group getGroup() { + return group; + } + + public void setGroup(Group group) { + this.group = group; + } + + public CrisLayoutBox getAlternativeBox() { + return alternativeBox; + } + + public void setAlternativeBox(CrisLayoutBox alternativeBox) { + this.alternativeBox = alternativeBox; + } +} diff --git 
a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBoxTypes.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBoxTypes.java index ad4be3ddc8b5..198d00636431 100644 --- a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBoxTypes.java +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBoxTypes.java @@ -11,5 +11,6 @@ public enum CrisLayoutBoxTypes { IIIFVIEWER, METADATA, RELATION, - METRICS + METRICS, + COLLECTIONS } diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java index 4f938ca69aec..9c0f4ef1e2b9 100644 --- a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java @@ -79,19 +79,16 @@ public class CrisLayoutTab implements ReloadableEntity { @Column(name = "security") private Integer security; + @Column(name = "custom_filter") + private String customFilter; + @ManyToMany(fetch = FetchType.LAZY) @JoinTable(name = "cris_layout_tab2securitymetadata", joinColumns = { @JoinColumn(name = "tab_id") }, inverseJoinColumns = { @JoinColumn(name = "metadata_field_id") }) private Set metadataSecurityFields = new HashSet<>(); - @ManyToMany(fetch = FetchType.LAZY) - @JoinTable( - name = "cris_layout_tab2securitygroup", - joinColumns = {@JoinColumn(name = "tab_id")}, - inverseJoinColumns = {@JoinColumn(name = "group_id")} - ) - private Set groupSecurityFields = new HashSet<>(); - + @OneToMany(fetch = FetchType.LAZY, mappedBy = "tab", cascade = CascadeType.ALL) + private Set tab2SecurityGroups = new HashSet<>(); @Column(name = "is_leading") private Boolean leading; @@ -180,6 +177,14 @@ public void setSecurity(Integer security) { this.security = security; } + public String getCustomFilter() { + return customFilter; + } + + public void setCustomFilter(String customFilter) { + this.customFilter = customFilter; + } + public Set getMetadataSecurityFields() { return metadataSecurityFields; } @@ -219,20 
+224,19 @@ public List getBoxes() { .collect(Collectors.toList()); } - public void setGroupSecurityFields(Set groupSecurityFields) { - this.groupSecurityFields = groupSecurityFields; - } - - public void addGroupSecurityFields(Set groupSecurityFields) { - this.groupSecurityFields.addAll(groupSecurityFields); + public Set getGroupSecurityFields() { + return tab2SecurityGroups.stream() + .map(crisLayoutTab2SecurityGroup -> + crisLayoutTab2SecurityGroup.getGroup()) + .collect(Collectors.toSet()); } - public void addGroupSecurityFields(Group group) { - this.groupSecurityFields.add(group); + public Set getTab2SecurityGroups() { + return tab2SecurityGroups; } - public Set getGroupSecurityFields() { - return groupSecurityFields; + public void setTab2SecurityGroups(Set tab2SecurityGroups) { + this.tab2SecurityGroups = tab2SecurityGroups; } @Override diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab2SecurityGroup.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab2SecurityGroup.java new file mode 100644 index 000000000000..f41b3ec53e88 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab2SecurityGroup.java @@ -0,0 +1,124 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.layout; + +import java.io.Serializable; +import javax.persistence.CascadeType; +import javax.persistence.Embeddable; +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.MapsId; +import javax.persistence.Table; + +import org.dspace.eperson.Group; + +@Entity +@Table(name = "cris_layout_tab2securitygroup") +public class CrisLayoutTab2SecurityGroup implements Serializable { + + @Embeddable + public static 
class CrisLayoutTab2SecurityGroupId implements Serializable { + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "tab_id") + private CrisLayoutTab tabId; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "group_id") + private Group groupId; + + public CrisLayoutTab2SecurityGroupId() { + + } + + public CrisLayoutTab2SecurityGroupId(CrisLayoutTab tabId, Group groupId) { + this.tabId = tabId; + this.groupId = groupId; + } + + public CrisLayoutTab getTabId() { + return tabId; + } + + public void setTabId(CrisLayoutTab tabId) { + this.tabId = tabId; + } + + public Group getGroupId() { + return groupId; + } + + public void setGroupId(Group groupId) { + this.groupId = groupId; + } + } + + @EmbeddedId + private CrisLayoutTab2SecurityGroupId id; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("tabId") + @JoinColumn(name = "tab_id", insertable = false, updatable = false) + private CrisLayoutTab tab; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("groupId") + @JoinColumn(name = "group_id", insertable = false, updatable = false) + private Group group; + + @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL) + @JoinColumn(name = "alternative_tab_id") + private CrisLayoutTab alternativeTab; + + public CrisLayoutTab2SecurityGroup() { + + } + + public CrisLayoutTab2SecurityGroup(CrisLayoutTab2SecurityGroupId id, + CrisLayoutTab tab, Group group, + CrisLayoutTab alternativeTab) { + this.id = id; + this.tab = tab; + this.group = group; + this.alternativeTab = alternativeTab; + } + + public CrisLayoutTab2SecurityGroupId getId() { + return id; + } + + public void setId(CrisLayoutTab2SecurityGroupId id) { + this.id = id; + } + + public CrisLayoutTab getTab() { + return tab; + } + + public void setTab(CrisLayoutTab tab) { + this.tab = tab; + } + + public Group getGroup() { + return group; + } + + public void setGroup(Group group) { + this.group = group; + } + + public CrisLayoutTab getAlternativeTab() { + return alternativeTab; + } + + public void 
setAlternativeTab(CrisLayoutTab alternativeTab) { + this.alternativeTab = alternativeTab; + } +} diff --git a/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java b/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java index 195b53c06f36..944e7c34715e 100644 --- a/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java +++ b/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java @@ -65,6 +65,17 @@ public interface CrisLayoutTabDAO extends GenericDAO { public List findByEntityTypeAndEagerlyFetchBoxes(Context context, String entityType) throws SQLException; + /** + * Returns all tabs in database filtered by entity type {@link EntityType} + * @param context The relevant DSpace Context + * @param entityType entity type label {@link EntityType} + * @param customFilter specialized entity type label {@link CrisLayoutTab#getCustomFilter()} + * @return List of CrisLayoutTab {@link CrisLayoutTab} + * @throws SQLException An exception that provides information on a database errors. + */ + public List findByEntityTypeAndEagerlyFetchBoxes(Context context, + String entityType, String customFilter) throws SQLException; + /** * Returns all tabs in database filtered by entity type {@link EntityType} * @param context The relevant DSpace Context @@ -75,7 +86,7 @@ public List findByEntityTypeAndEagerlyFetchBoxes(Context context, * @throws SQLException An exception that provides information on a database errors. 
*/ public List findByEntityTypeAndEagerlyFetchBoxes( - Context context, String entityType, Integer limit, Integer offset) throws SQLException; + Context context, String entityType, String customFilter, Integer limit, Integer offset) throws SQLException; /** * Returns the total number of metadata field associated at tab diff --git a/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java b/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java index f1d34a3f74f5..bd898d8e1b40 100644 --- a/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java @@ -10,14 +10,17 @@ import static org.dspace.layout.CrisLayoutTab.ROWS_AND_CONTENT_GRAPH; import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import javax.persistence.EntityGraph; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Join; +import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.EntityType; import org.dspace.content.EntityType_; import org.dspace.content.MetadataField; @@ -62,24 +65,42 @@ public Long countTotal(Context context) throws SQLException { return getHibernateSession(context).createQuery(cc).getSingleResult(); } + /* (non-Javadoc) + * @see org.dspace.layout.dao.CrisLayoutTabDAO#findByEntityType(java.lang.String) + */ + @Override + public List findByEntityTypeAndEagerlyFetchBoxes(Context context, + String entityType, String customFilter) throws SQLException { + return findByEntityTypeAndEagerlyFetchBoxes(context, entityType, customFilter, null, null); + } + /* (non-Javadoc) * @see org.dspace.layout.dao.CrisLayoutTabDAO#findByEntityType(java.lang.String) */ @Override public List 
findByEntityTypeAndEagerlyFetchBoxes(Context context, String entityType) throws SQLException { - return findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null, null); + return findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null, null, null); } @Override public List findByEntityTypeAndEagerlyFetchBoxes(Context context, String entityType, - Integer limit, Integer offset) throws SQLException { + String customFilter, Integer limit, Integer offset) throws SQLException { CriteriaBuilder cb = getCriteriaBuilder(context); CriteriaQuery query = cb.createQuery(CrisLayoutTab.class); Root tabRoot = query.from(CrisLayoutTab.class); + List andPredicates = new ArrayList<>(); + + andPredicates.add(cb.equal(tabRoot.get(CrisLayoutTab_.entity).get(EntityType_.LABEL), entityType)); + if (StringUtils.isNotBlank(customFilter)) { + andPredicates.add(cb.equal(tabRoot.get(CrisLayoutTab_.CUSTOM_FILTER), customFilter)); + } else { + andPredicates.add(cb.isNull((tabRoot.get(CrisLayoutTab_.CUSTOM_FILTER)))); + } + query - .where(cb.equal(tabRoot.get(CrisLayoutTab_.entity).get(EntityType_.LABEL), entityType)) + .where(andPredicates.toArray(new Predicate[] {})) .orderBy(cb.asc(tabRoot.get(CrisLayoutTab_.PRIORITY))); TypedQuery typedQuery = getHibernateSession(context).createQuery(query); diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java b/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java index c1a9cff5dbb4..74302960cff5 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java @@ -108,6 +108,8 @@ public interface CrisLayoutToolValidator { String GROUP_COLUMN = "GROUP"; + String ALTERNATIVE_TO_COLUMN = "ALTERNATIVE_TO"; + String METADATA_TYPE = "METADATA"; String BITSTREAM_TYPE = "BITSTREAM"; diff --git 
a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java index 1aec1c349372..52ba3ddedc16 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java @@ -31,13 +31,14 @@ import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.usermodel.WorkbookFactory; import org.dspace.content.MetadataField; -import org.dspace.eperson.Group; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutField; import org.dspace.layout.CrisLayoutFieldBitstream; import org.dspace.layout.CrisLayoutMetric2Box; import org.dspace.layout.CrisLayoutTab; +import org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.CrisMetadataGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.script.service.CrisLayoutToolConverter; @@ -247,9 +248,9 @@ private void buildTabPolicy(Workbook workbook, CrisLayoutTab tab) { buildTabPolicyMetadataSecurityFieldRow(sheet, tab, metadataField) ); - tab.getGroupSecurityFields() - .forEach(group -> - buildTabPolicyGroupSecurityFieldRow(sheet, tab, group) + tab.getTab2SecurityGroups() + .forEach(tab2SecurityGroup -> + buildTabPolicyGroupSecurityFieldRow(sheet, tab, tab2SecurityGroup) ); } @@ -259,14 +260,18 @@ private void buildTabPolicyMetadataSecurityFieldRow(Sheet sheet, CrisLayoutTab t createCell(row, 1, tab.getShortName()); createCell(row, 2, metadataField.toString('.')); createCell(row, 3, ""); + createCell(row, 4, ""); } - private void buildTabPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutTab tab, Group group) { + private void buildTabPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutTab tab, + 
CrisLayoutTab2SecurityGroup tab2SecurityGroup) { + CrisLayoutTab alternativeTab = tab2SecurityGroup.getAlternativeTab(); Row row = sheet.createRow(sheet.getLastRowNum() + 1); createCell(row, 0, tab.getEntity().getLabel()); createCell(row, 1, tab.getShortName()); createCell(row, 2, ""); - createCell(row, 3, group.getName()); + createCell(row, 3, tab2SecurityGroup.getGroup().getName()); + createCell(row, 4, alternativeTab == null ? "" : alternativeTab.getShortName()); } private void buildBoxPolicy(Workbook workbook, List boxes) { @@ -277,9 +282,9 @@ private void buildBoxPolicy(Workbook workbook, List boxes) { buildBoxPolicyMetadataSecurityFieldRow(sheet, box, metadataField) ); - box.getGroupSecurityFields() - .forEach(group -> - buildBoxPolicyGroupSecurityFieldRow(sheet, box, group) + box.getBox2SecurityGroups() + .forEach(box2SecurityGroup -> + buildBoxPolicyGroupSecurityFieldRow(sheet, box, box2SecurityGroup) ); }); } @@ -290,14 +295,19 @@ private void buildBoxPolicyMetadataSecurityFieldRow(Sheet sheet, CrisLayoutBox b createCell(row, 1, box.getShortname()); createCell(row, 2, metadataField.toString('.')); createCell(row, 3, ""); + createCell(row, 4, ""); } - private void buildBoxPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutBox box, Group group) { + private void buildBoxPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutBox box, + CrisLayoutBox2SecurityGroup box2SecurityGroup) { + + CrisLayoutBox alternativeBox = box2SecurityGroup.getAlternativeBox(); Row row = sheet.createRow(sheet.getLastRowNum() + 1); createCell(row, 0, box.getCell().getRow().getTab().getEntity().getLabel()); createCell(row, 1, box.getShortname()); createCell(row, 2, ""); - createCell(row, 3, group.getName()); + createCell(row, 3, box2SecurityGroup.getGroup().getName()); + createCell(row, 4, alternativeBox == null ? 
"" : alternativeBox.getShortname()); } private String convertToString(boolean value) { diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java index e8ace7e91008..a4f5fec248ef 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java @@ -7,6 +7,7 @@ */ package org.dspace.layout.script.service.impl; +import static org.dspace.layout.script.service.CrisLayoutToolValidator.ALTERNATIVE_TO_COLUMN; import static org.dspace.layout.script.service.CrisLayoutToolValidator.BITSTREAM_TYPE; import static org.dspace.layout.script.service.CrisLayoutToolValidator.BOX2METADATA_SHEET; import static org.dspace.layout.script.service.CrisLayoutToolValidator.BOX2METRICS_SHEET; @@ -53,6 +54,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -76,6 +78,7 @@ import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutBoxTypes; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutField; @@ -84,6 +87,7 @@ import org.dspace.layout.CrisLayoutMetric2Box; import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; +import org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.CrisMetadataGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.script.service.CrisLayoutToolParser; @@ -110,9 +114,21 @@ public class CrisLayoutToolParserImpl implements CrisLayoutToolParser { @Override public List parse(Context context, Workbook workbook) { Sheet tabSheet = 
getSheetByName(workbook, TAB_SHEET); - return WorkbookUtils.getNotEmptyRowsSkippingHeader(tabSheet).stream() - .map(row -> buildTab(context, row)) - .collect(Collectors.toList()); + List tabs = + WorkbookUtils.getNotEmptyRowsSkippingHeader(tabSheet).stream() + .map(row -> buildTab(context, row)) + .collect(Collectors.toList()); + + tabs.forEach(tab -> { + tab.setTab2SecurityGroups(buildTab2SecurityGroups(context, + workbook, TAB_POLICY_SHEET, tab.getEntity().getLabel(), tab.getShortName(), tab, tabs)); + + tab.getBoxes().forEach(box -> + box.setBox2SecurityGroups(buildBox2SecurityGroups(context, + workbook, BOX_POLICY_SHEET, box.getEntitytype().getLabel(), box.getShortname(), box, tabs))); + }); + + return tabs; } private CrisLayoutTab buildTab(Context context, Row tabRow) { @@ -120,9 +136,14 @@ private CrisLayoutTab buildTab(Context context, Row tabRow) { Workbook workbook = tabRow.getSheet().getWorkbook(); String name = getCellValue(tabRow, SHORTNAME_COLUMN); - String entityType = getCellValue(tabRow, ENTITY_COLUMN); + String entityColumn = getCellValue(tabRow, ENTITY_COLUMN); + + int index = entityColumn.indexOf("."); + String customFilter = (index > 0 && index < entityColumn.length()) ? entityColumn.substring(index + 1) : null; + String entityType = (index > 0) ? 
entityColumn.substring(0, index) : entityColumn; tab.setEntity(getEntityType(context, entityType)); + tab.setCustomFilter(customFilter); tab.setShortName(name); tab.setHeader(getCellValue(tabRow, LABEL_COLUMN)); tab.setLeading(toBoolean(getCellValue(tabRow, LEADING_COLUMN))); @@ -131,8 +152,6 @@ private CrisLayoutTab buildTab(Context context, Row tabRow) { buildTabRows(context, workbook, entityType, name).forEach(tab::addRow); tab.setMetadataSecurityFields(buildMetadataSecurityField(context, workbook, TAB_POLICY_SHEET, entityType, name)); - tab.setGroupSecurityFields(buildGroupSecurityField(context, workbook, - TAB_POLICY_SHEET, entityType, name)); return tab; } @@ -172,7 +191,7 @@ private CrisLayoutCell buildCell(Context context, Row tab2boxRow) { private List buildBoxes(Context context, Row tab2boxRow) { - String entityType = getCellValue(tab2boxRow, ENTITY_COLUMN); + String entityType = getEntityValue(tab2boxRow, ENTITY_COLUMN); String boxes = getCellValue(tab2boxRow, BOXES_COLUMN); if (StringUtils.isBlank(boxes)) { @@ -213,8 +232,6 @@ private CrisLayoutBox buildBox(Context context, Sheet boxSheet, String entityTyp box.setStyle(getCellValue(boxRow, STYLE_COLUMN)); box.setMetadataSecurityFields(buildMetadataSecurityField(context, workbook, BOX_POLICY_SHEET, entityType, boxName)); - box.setGroupSecurityFields(buildGroupSecurityField(context, workbook, - BOX_POLICY_SHEET, entityType, boxName)); if (boxType.equals(CrisLayoutBoxTypes.METADATA.name())) { buildCrisLayoutFields(context, workbook, entityType, boxName).forEach(box::addLayoutField); @@ -281,7 +298,7 @@ private CrisLayoutField buildMetadataGroupField(Context context, Row row) { private List buildCrisMetadataGroups(Context context, Row row) { String metadataField = getCellValue(row, METADATA_COLUMN); - String entity = getCellValue(row, ENTITY_COLUMN); + String entity = getEntityValue(row, ENTITY_COLUMN); Sheet metadatagroupsSheet = getSheetByName(row.getSheet().getWorkbook(), METADATAGROUPS_SHEET); @@ 
-371,10 +388,111 @@ private Set buildGroupSecurityField(Context context, Workbook workbook, .collect(Collectors.toSet()); } + private Set buildBox2SecurityGroups(Context context, Workbook workbook, + String sheetName, String entity, String name, + CrisLayoutBox crisLayoutBox, + List tabs) { + Sheet sheet = getSheetByName(workbook, sheetName); + Set box2SecurityGroups = new HashSet<>(); + + getRowsByEntityAndColumnValue(sheet, entity, SHORTNAME_COLUMN, name) + .forEach(row -> { + String groupName = getCellValue(row, GROUP_COLUMN); + String alternativeBox = getCellValue(row, ALTERNATIVE_TO_COLUMN); + + if (StringUtils.isNotBlank(groupName)) { + Group group = getGroupField(context, groupName); + if (group != null) { + box2SecurityGroups.add( + buildBox2SecurityGroup(group, crisLayoutBox, entity, alternativeBox, tabs) + ); + } + } + }); + + return box2SecurityGroups; + } + + private CrisLayoutBox2SecurityGroup buildBox2SecurityGroup(Group group, CrisLayoutBox box, + String entity, + String alternativeBox, List tabs) { + + CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId box2SecurityGroupId = + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box, group); + + return new CrisLayoutBox2SecurityGroup(box2SecurityGroupId, box, group, + findAlternativeBox(alternativeBox, entity, tabs)); + } + + private CrisLayoutBox findAlternativeBox(String alternativeBox, String entityType, List tabs) { + + if (alternativeBox == null) { + return null; + } + + return tabs.stream() + .flatMap(tab -> tab.getBoxes().stream()) + .filter(crisLayoutBox -> crisLayoutBox.getShortname().equals(alternativeBox) && + crisLayoutBox.getEntitytype().getLabel().equals(entityType)) + .findFirst() + .orElseThrow(() -> new RuntimeException("Alternative box not found for shortname: " + + alternativeBox + ", entityType: " + entityType)); + } + + private Set buildTab2SecurityGroups(Context context, Workbook workbook, + String sheetName, String entity, String name, + CrisLayoutTab 
crisLayoutTab, + List tabs) { + Sheet sheet = getSheetByName(workbook, sheetName); + Set tab2SecurityGroups = new HashSet<>(); + + getRowsByEntityAndColumnValue(sheet, entity, SHORTNAME_COLUMN, name) + .forEach(row -> { + String groupName = getCellValue(row, GROUP_COLUMN); + String alternativeTab = getCellValue(row, ALTERNATIVE_TO_COLUMN); + + if (StringUtils.isNotBlank(groupName)) { + Group group = getGroupField(context, groupName); + if (group != null) { + tab2SecurityGroups.add( + buildTab2SecurityGroup(group, crisLayoutTab, entity, alternativeTab, tabs) + ); + } + } + }); + + return tab2SecurityGroups; + } + + private CrisLayoutTab2SecurityGroup buildTab2SecurityGroup(Group group, CrisLayoutTab tab, + String entity, + String alternativeTab, List tabs) { + + CrisLayoutTab2SecurityGroup.CrisLayoutTab2SecurityGroupId tab2SecurityGroupId = + new CrisLayoutTab2SecurityGroup.CrisLayoutTab2SecurityGroupId(tab, group); + + return new CrisLayoutTab2SecurityGroup(tab2SecurityGroupId, tab, group, + findAlternativeTab(alternativeTab, entity, tabs)); + } + + private CrisLayoutTab findAlternativeTab(String alternativeTab, String entityType, List tabs) { + + if (alternativeTab == null) { + return null; + } + + return tabs.stream() + .filter(crisLayoutTab -> crisLayoutTab.getShortName().equals(alternativeTab) && + crisLayoutTab.getEntity().getLabel().equals(entityType)) + .findFirst() + .orElseThrow(() -> new RuntimeException("Alternative tab not found for shortname: " + + alternativeTab + ", entityType: " + entityType)); + } + private Stream getRowsByEntityAndColumnValue(Sheet sheet, String entity, String columnName, String value) { return WorkbookUtils.getNotEmptyRowsSkippingHeader(sheet).stream() .filter(row -> value.equals(getCellValue(row, columnName))) - .filter(row -> entity.equals(getCellValue(row, ENTITY_COLUMN))); + .filter(row -> entity.equals(getEntityValue(row, ENTITY_COLUMN))); } private boolean toBoolean(String value) { @@ -389,6 +507,14 @@ private Integer 
toInteger(String value) { } } + private String getEntityValue(Row row, String header) { + String cellValue = WorkbookUtils.getCellValue(row, header); + return Optional.ofNullable(cellValue) + .filter(cell -> cell.contains(".")) + .map(cell -> cell.split("\\.")[0]) + .orElse(StringUtils.isNotBlank(cellValue) ? cellValue : null); + } + private String getCellValue(Row row, String header) { String cellValue = WorkbookUtils.getCellValue(row, header); return StringUtils.isNotBlank(cellValue) ? cellValue : null; diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java index a3e45858d789..d6d4e6be745a 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java @@ -11,6 +11,8 @@ import static org.dspace.util.WorkbookUtils.getCellIndexFromHeaderName; import static org.dspace.util.WorkbookUtils.getCellValue; import static org.dspace.util.WorkbookUtils.getColumnWithoutHeader; +import static org.dspace.util.WorkbookUtils.getEntityTypeCellValue; +import static org.dspace.util.WorkbookUtils.getEntityTypeValue; import static org.dspace.util.WorkbookUtils.getNotEmptyRowsSkippingHeader; import java.sql.SQLException; @@ -456,7 +458,7 @@ private void validatePresenceInBoxSheet(CrisLayoutToolValidationResult result, S int entityTypeColumn, int nameColumn) { for (Row row : getNotEmptyRowsSkippingHeader(sheet)) { - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String name = getCellValue(row, nameColumn); if (isNotPresentOnSheet(sheet.getWorkbook(), BOX_SHEET, entityType, name)) { result.addError("The box with name " + name + @@ -490,7 +492,7 @@ private void 
validatePresenceInTab2BoxSheet(CrisLayoutToolValidationResult resul } for (Row row : getNotEmptyRowsSkippingHeader(sheet)) { - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String shortname = getCellValue(row, shortnameColumn); if (isNotPresentOnTab2Box(tab2boxSheet, columnName, entityType, shortname)) { result.addWarning("The " + sheet.getSheetName() + " with name " + shortname + @@ -507,7 +509,7 @@ private void validateTab2BoxRowReferences(Row row, CrisLayoutToolValidationResul Sheet tab2boxSheet = row.getSheet(); - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String tab = getCellValue(row, tabColumn); String[] boxes = splitByCommaAndTrim(getCellValue(row, boxesColumn)); @@ -560,7 +562,7 @@ private void validateRowStyleColumn(Sheet sheet, String containerColumnName, continue; } - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String container = getCellValue(row, containerColumn); String rowCount = getCellValue(row, rowColumn); @@ -585,7 +587,7 @@ private List findSameRowsWithDifferentStyle(Sheet sheet, String entity, .filter(sheetRow -> excelRowNum != sheetRow.getRowNum()) .filter(sheetRow -> row.equals(getCellValue(sheetRow, rowColumn))) .filter(sheetRow -> container.equals(getCellValue(sheetRow, containerColumn))) - .filter(sheetRow -> entity.equals(getCellValue(sheetRow, entityTypeColumn))) + .filter(sheetRow -> entity.equals(getEntityTypeCellValue(sheetRow, entityTypeColumn))) .filter(sheetRow -> hasDifferentStyle(sheetRow, rowStyleColumn, style)) .map(Row::getRowNum) .collect(Collectors.toList()); @@ -633,7 +635,8 @@ private boolean sameEntityTypeAndName(Row row, int entityTypeColumn, String enti int nameColumn, String name) { String[] namesOnColumn = splitByCommaAndTrim(getCellValue(row, nameColumn)); - return 
entityType.equals(getCellValue(row, entityTypeColumn)) && ArrayUtils.contains(namesOnColumn, name); + return entityType.equals(getEntityTypeCellValue(row, entityTypeColumn)) + && ArrayUtils.contains(namesOnColumn, name); } @@ -641,8 +644,11 @@ private void validateEntityTypes(CrisLayoutToolValidationResult result, Sheet sh int entityColumn, List allEntityTypes) { for (Cell entityTypeCell : getColumnWithoutHeader(sheet, entityColumn)) { - String entityType = WorkbookUtils.getCellValue(entityTypeCell); - if (!allEntityTypes.contains(entityType)) { + String entityType = getCellValue(entityTypeCell); + if ( + !allEntityTypes.contains(entityType) && + !allEntityTypes.contains(getEntityTypeValue(entityTypeCell)) + ) { result.addError("The " + sheet.getSheetName() + " contains an unknown entity type '" + entityType + "' at row " + entityTypeCell.getRowIndex()); } diff --git a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java index 12d7d08084e9..2679d34865df 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java @@ -32,6 +32,5 @@ public interface CrisLayoutTabAccessService { * @return true if access has to be granded, false otherwise * @throws SQLException in case of error during database access */ - boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item) - throws SQLException; + boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java index dc1980e7894b..919dc7eb4310 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java +++ 
b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java @@ -94,22 +94,25 @@ public CrisLayoutTab create(Context context, EntityType eType, Integer priority) * Returns all tabs in the database filtered by entity type {@link EntityType} * @param context The relevant DSpace Context * @param entityType label of entity type {@link EntityType} + * @param customFilter label of specialized entity type {@link CrisLayoutTab#getCustomFilter()} * @return List of CrisLayoutTab {@link CrisLayoutTab} * @throws SQLException An exception that provides information on a database errors. */ - public List findByEntityType(Context context, String entityType) throws SQLException; + public List findByEntityType(Context context, String entityType, String customFilter) + throws SQLException; /** * Returns all tabs in database filtered by entity type {@link EntityType} * @param context The relevant DSpace Context * @param entityType entity type label + * @param customFilter label of specialized entity type {@link CrisLayoutTab#getCustomFilter()} * @param limit how many results return * @param offset the position of the first result to return * @return List of CrisLayoutTab {@link CrisLayoutTab} * @throws SQLException An exception that provides information on a database errors. */ - public List findByEntityType(Context context, String entityType, Integer limit, Integer offset) - throws SQLException; + public List findByEntityType(Context context, String entityType, String customFilter, Integer limit, + Integer offset) throws SQLException; /** * Returns the total number of tabs with a specific entity type @@ -149,4 +152,6 @@ public List getMetadataField(Context context, Integer tabId, Inte * @throws SQLException An exception that provides information on a database errors. 
*/ public List findByItem(Context context, String itemUuid) throws SQLException; + + public boolean hasAccess(Context context, CrisLayoutTab tab, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java index a1bc95b818c3..acd5d38c0b41 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java @@ -159,6 +159,8 @@ public boolean hasContent(Context context, CrisLayoutBox box, Item item) { return hasRelationBoxContent(context, box, item); case "METRICS": return hasMetricsBoxContent(context, box, item); + case "COLLECTIONS": + return isOwningCollectionPresent(item); case "IIIFVIEWER": return isIiifEnabled(item); case "METADATA": @@ -206,7 +208,13 @@ private boolean isMetadataFieldPresent(DSpaceObject item, MetadataField metadata } private boolean isBitstreamPresent(Context context, Item item, CrisLayoutFieldBitstream field) { - Map filters = Map.of(field.getMetadataField().toString('.'), field.getMetadataValue()); + + Map filters = Map.of(); + + if (field.getMetadataField() != null) { + filters = Map.of(field.getMetadataField().toString('.'), field.getMetadataValue()); + } + try { return bitstreamService.findShowableByItem(context, item.getID(), field.getBundle(), filters).size() > 0; } catch (SQLException e) { @@ -246,6 +254,10 @@ private boolean isIiifEnabled(Item item) { new MetadataFieldName("dspace.iiif.enabled"), Item.ANY)); } + private boolean isOwningCollectionPresent(Item item) { + return Objects.nonNull(item.getOwningCollection()); + } + private boolean currentUserIsNotAllowedToReadItem(Context context, Item item) { try { return !authorizeService.authorizeActionBoolean(context, item, Constants.READ); diff --git 
a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java index 71e20fd883fa..331c5df679be 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java @@ -31,12 +31,14 @@ public CrisLayoutTabAccessServiceImpl(LayoutSecurityService layoutSecurityServic } @Override - public boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item) throws SQLException { - return layoutSecurityService.hasAccess(LayoutSecurity.valueOf(tab.getSecurity()), - context, - user, - tab.getMetadataSecurityFields(), - tab.getGroupSecurityFields(), - item); + public boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item) { + try { + return layoutSecurityService.hasAccess( + LayoutSecurity.valueOf(tab.getSecurity()), context, user, tab.getMetadataSecurityFields(), + tab.getGroupSecurityFields(), item + ); + } catch (SQLException e) { + throw new RuntimeException(e); + } } } diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java index 0758853d4dd2..980305e67d6e 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java @@ -7,13 +7,19 @@ */ package org.dspace.layout.service.impl; +import static org.dspace.util.FunctionalUtils.throwingMapperWrapper; + import java.sql.SQLException; import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.UUID; +import javax.annotation.PostConstruct; import org.apache.commons.collections.CollectionUtils; +import 
org.dspace.app.util.SubmissionConfigReader; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.EntityType; @@ -23,7 +29,9 @@ import org.dspace.core.Context; import org.dspace.layout.CrisLayoutTab; import org.dspace.layout.dao.CrisLayoutTabDAO; +import org.dspace.layout.service.CrisLayoutTabAccessService; import org.dspace.layout.service.CrisLayoutTabService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -42,6 +50,19 @@ public class CrisLayoutTabServiceImpl implements CrisLayoutTabService { @Autowired private ItemService itemService; + @Autowired + private ConfigurationService configurationService; + + private SubmissionConfigReader submissionConfigReader; + + @Autowired + CrisLayoutTabAccessService crisLayoutTabAccessService; + + @PostConstruct + private void setup() throws SubmissionConfigReaderException { + submissionConfigReader = new SubmissionConfigReader(); + } + @Override public CrisLayoutTab create(Context c, CrisLayoutTab tab) throws SQLException, AuthorizeException { if (!authorizeService.isAdmin(c)) { @@ -135,8 +156,9 @@ public Long countTotal(Context context) throws SQLException { * @see org.dspace.layout.service.CrisLayoutTabService#findByEntityType(org.dspace.core.Context, java.lang.String) */ @Override - public List findByEntityType(Context context, String entityType) throws SQLException { - return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType); + public List findByEntityType(Context context, String entityType, String customFilter) + throws SQLException { + return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, customFilter); } /* (non-Javadoc) @@ -144,9 +166,9 @@ public List findByEntityType(Context context, String entityType) * (org.dspace.core.Context, java.lang.String, java.lang.Integer, java.lang.Integer) */ 
@Override - public List findByEntityType(Context context, String entityType, Integer limit, Integer offset) - throws SQLException { - return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, limit, offset); + public List findByEntityType(Context context, String entityType, String customFilter, Integer limit, + Integer offset) throws SQLException { + return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, customFilter, limit, offset); } /* (non-Javadoc) @@ -171,12 +193,57 @@ public Long totalMetadataField(Context context, Integer tabId) throws SQLExcepti @Override public List findByItem(Context context, String itemUuid) throws SQLException { Item item = Objects.requireNonNull(itemService.find(context, UUID.fromString(itemUuid)), - "The itemUuid entered does not match with any item"); - String entityType = itemService.getMetadata(item, "dspace.entity.type"); - if (entityType == null) { + "The itemUuid entered does not match with any item"); + + String entityTypeValue = itemService.getMetadata(item, "dspace.entity.type"); + String submissionName = getSubmissionDefinitionName(item); + + List layoutTabs = + Optional.ofNullable(this.configurationService.getProperty("dspace.metadata.layout.tab")) + .map(metadataField -> this.itemService.getMetadataByMetadataString(item, metadataField)) + .filter(metadatas -> !metadatas.isEmpty()) + .map(metadatas -> metadatas.get(0)) + .map(metadata -> + findValidEntityType(context, entityTypeValue, submissionName + "." + + metadata.getAuthority()) + .orElse( + findValidEntityType(context, entityTypeValue, submissionName + "." 
+ + metadata.getValue()) + .orElse(findValidEntityType(context, entityTypeValue, metadata.getAuthority()) + .orElse(findValidEntityType(context, entityTypeValue, metadata.getValue()) + .orElse(null)))) + ) + .orElse(findValidEntityType(context, entityTypeValue, submissionName) + .orElse(findByEntityType(context, entityTypeValue, null))); + if (layoutTabs == null) { return Collections.emptyList(); } - return findByEntityType(context, entityType); + return layoutTabs; + } + + @Override + public boolean hasAccess(Context context, CrisLayoutTab tab, Item item) { + return crisLayoutTabAccessService.hasAccess(context, context.getCurrentUser(), tab, item); + } + + private String getSubmissionDefinitionName(Item item) { + if (submissionConfigReader == null || item.getOwningCollection() == null) { + return ""; + } + + return submissionConfigReader.getSubmissionConfigByCollection(item.getOwningCollection()).getSubmissionName(); + } + + private Optional> findValidEntityType(Context context, String entityTypeValue, + String customFilter) { + return Optional.ofNullable(customFilter) + .map( + throwingMapperWrapper( + value -> findByEntityType(context, entityTypeValue, value), + null + ) + ) + .filter(tabs -> tabs != null && !tabs.isEmpty()); } } diff --git a/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java b/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java index b680e43eeab7..79543f43ce1e 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java +++ b/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java @@ -118,4 +118,8 @@ public int getFetchSize() { public void setFetchSize(int fetchSize) { this.fetchSize = fetchSize; } + + public List getLogs() { + return null; + } } diff --git a/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java b/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java index 12877252791b..82c690e62caa 
100644 --- a/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java +++ b/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java @@ -13,6 +13,7 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Function; @@ -108,6 +109,7 @@ public void internalRun() throws Exception { performUpdate(externalService); context.complete(); } catch (Exception e) { + getLogsFromMetricService(externalService); log.error(e.getMessage(), e); handler.handleException(e); context.abort(); @@ -184,6 +186,7 @@ private void performUpdateWithMultiFetch(MetricsExternalServices metricsServices long updatedItems = metricsServices.updateMetric(context, itemIterator, param); + getLogsFromMetricService(metricsServices); handler.logInfo("Updated " + updatedItems + " metrics"); handler.logInfo("Update end"); @@ -216,6 +219,7 @@ private void performUpdateWithSingleFetches(MetricsExternalServices metricsServi } context.commit(); + getLogsFromMetricService(metricsServices); handler.logInfo("Found " + countFoundItems + " items"); handler.logInfo("Updated " + countUpdatedItems + " metrics"); handler.logInfo("Update end"); @@ -240,4 +244,10 @@ private void assignSpecialGroupsInContext() throws SQLException { } } + private void getLogsFromMetricService(MetricsExternalServices metricsServices) { + List metricLogger = metricsServices.getLogs(); + if (metricLogger != null) { + metricLogger.forEach(message -> handler.logInfo(message)); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java b/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java index cf3eb875eb74..9bafca2d5169 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java +++ b/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java @@ -40,9 +40,15 @@ public class 
ScopusProvider { private static final Logger log = LogManager.getLogger(ScopusProvider.class); + private List logsCache = new ArrayList<>(); + @Autowired private ScopusRestConnector scopusRestConnector; + public List getLogs() { + return logsCache; + } + /** *

      * This methods fetch a list of metrics using the {@code id} param, @@ -54,6 +60,7 @@ public class ScopusProvider { * @return List of CrisMetrics fetched */ public List getScopusList(String id) { + logsCache = new ArrayList<>(); String scopusResponse = getRecords(id); if (StringUtils.isNotBlank(scopusResponse)) { List crisMetricList = mapToCrisMetricList(scopusResponse); @@ -66,7 +73,7 @@ public List getScopusList(String id) { } return crisMetricList; } - log.error("The query : " + id + " is wrong!"); + logAndCache("The query : " + id + " is wrong!"); return List.of(); } @@ -75,7 +82,7 @@ public CrisMetricDTO getScopusObject(String id) { if (StringUtils.isNotBlank(scopusResponse)) { return mapToCrisMetric(scopusResponse); } - log.error("The query : " + id + " is wrong!"); + logAndCache("The query : " + id + " is wrong!"); return null; } @@ -94,7 +101,7 @@ private CrisMetricDTO mapToCrisMetric(String scopusResponse) { docBuilder = docBuilderFactory.newDocumentBuilder(); parsedResponse = docBuilder.parse(new InputSource(new StringReader(scopusResponse))); } catch (ParserConfigurationException | SAXException | IOException e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return mapToCrisMetric(parsedResponse); } @@ -107,7 +114,7 @@ private List mapToCrisMetricList(String scopusResponse) { docBuilder = docBuilderFactory.newDocumentBuilder(); parsedResponse = docBuilder.parse(new InputSource(new StringReader(scopusResponse))); } catch (ParserConfigurationException | SAXException | IOException e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return mapToCrisMetricList(parsedResponse); } @@ -134,7 +141,7 @@ private String getNext(String scopusResponse) { .map(element -> element.getAttribute("href")) .orElse(null); } catch (ParserConfigurationException | SAXException | IOException e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return nextUrl; } @@ -148,7 +155,7 @@ private List mapToCrisMetricList(Document doc) { 
.filter(Objects::nonNull) .collect(Collectors.toList()); } catch (Exception e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return scopusCitationList; } @@ -162,7 +169,7 @@ private CrisMetricDTO mapToCrisMetric(Document doc) { .map(this::mapToCrisMetric) .orElse(null); } catch (Exception e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return scopusCitation; } @@ -170,13 +177,13 @@ private CrisMetricDTO mapToCrisMetric(Document doc) { private CrisMetricDTO mapToCrisMetric(Element dataRoot) { CrisMetricDTO scopusCitation = new CrisMetricDTO(); if (dataRoot == null) { - log.debug("No citation entry found in Scopus"); + logAndCache("No citation entry found in Scopus"); return scopusCitation; } Element errorScopusResp = XMLUtils.getSingleElement(dataRoot, "error"); if (errorScopusResp != null) { - log.debug("Error citation entry found in Scopus: " + errorScopusResp.getTextContent()); + logAndCache("Error citation entry found in Scopus: " + errorScopusResp.getTextContent()); return scopusCitation; } @@ -203,10 +210,25 @@ private CrisMetricDTO mapToCrisMetric(Element dataRoot) { try { scopusCitation.setMetricCount(Double.valueOf(numCitations)); } catch (NullPointerException | NumberFormatException ex) { - log.error("Error while trying to parse numCitations:" + numCitations); + logAndCacheErrorWithMessage("Error while trying to parse numCitations:" + numCitations, ex); } scopusCitation.setRemark(scopusCitation.buildMetricsRemark()); return scopusCitation; } + private void logAndCache(String message) { + logsCache.add("INFO: " + message); + log.debug(message); + } + + private void logAndCacheErrorWithMessage(String message, Throwable e) { + logsCache.add("ERROR: " + message + '\n' + e.getMessage()); + log.error(message, e); + } + + private void logAndCacheError(Throwable e) { + logsCache.add("ERROR: " + e.getMessage()); + log.error(e.getMessage(), e); + } + } \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java b/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java index bd11feb99d3b..782f6f832fb2 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java +++ b/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java @@ -42,6 +42,8 @@ public class UpdateScopusMetrics extends MetricsExternalServices { public static final String SCOPUS_CITATION = "scopusCitation"; + private List logsCache = new ArrayList<>(); + @Autowired private ScopusProvider scopusProvider; @@ -61,6 +63,10 @@ public List getFilters() { return Arrays.asList("dspace.entity.type:Publication", "dc.identifier.doi:* OR dc.identifier.pmid:*"); } + public List getLogs() { + return logsCache; + } + @Override public boolean updateMetric(Context context, Item item, String param) { String id = buildQuery(item); @@ -76,16 +82,20 @@ public long updateMetric(Context context, Iterator itemIterator, String pa long updatedItems = 0; long foundItems = 0; long apiCalls = 0; + logsCache = new ArrayList<>(); try { while (itemIterator.hasNext()) { Map queryMap = new HashMap<>(); List itemList = new ArrayList<>(); for (int i = 0; i < fetchSize && itemIterator.hasNext(); i++) { Item item = itemIterator.next(); + logAndCache("Adding item with uuid: " + item.getID()); setLastImportMetadataValue(context, item); itemList.add(item); } foundItems += itemList.size(); + String id = this.generateQuery(queryMap, itemList); + logAndCache("Getting scopus metrics for " + id); updatedItems += scopusProvider.getScopusList(this.generateQuery(queryMap, itemList)) .stream() @@ -102,11 +112,11 @@ public long updateMetric(Context context, Iterator itemIterator, String pa context.commit(); } } catch (SQLException e) { - log.error("Error while updating scopus' metrics", e); - throw new RuntimeException(e.getMessage(), e); + logAndCacheError("Error while updating scopus' metrics", e); } finally { - 
log.info("Found and fetched {} with {} api calls!", foundItems, apiCalls); + logAndCache("Found and fetched " + foundItems + " with " + apiCalls + " api calls!"); } + logsCache.addAll(scopusProvider.getLogs()); return updatedItems; } @@ -213,6 +223,7 @@ private boolean updateScopusMetrics(Context context, Item currentItem, CrisMetri createNewScopusMetrics(context,currentItem, scopusMetric, deltaPeriod1, deltaPeriod2); } catch (SQLException | AuthorizeException e) { + logsCache.add(e.getMessage()); log.error(e.getMessage(), e); } return true; @@ -236,4 +247,14 @@ private Double getDeltaPeriod(CrisMetricDTO currentMetric, Optional } return null; } + + private void logAndCache(String message) { + logsCache.add("INFO: " + message); + log.info(message); + } + + private void logAndCacheError(String message, Throwable e) { + logsCache.add("ERROR: " + message + '\n' + e.getMessage()); + log.error(message, e); + } } diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java index 33edea112e76..07a79384c77c 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java @@ -79,6 +79,8 @@ public class OrcidHistory implements ReloadableEntity { /** * A description of the synchronized resource. */ + @Lob + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -87,7 +89,7 @@ public class OrcidHistory implements ReloadableEntity { * the owner itself. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "metadata") private String metadata; @@ -102,7 +104,7 @@ public class OrcidHistory implements ReloadableEntity { * The response message incoming from ORCID. 
*/ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "response_message") private String responseMessage; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java index 4794e89008c3..65b66cd20c3e 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -64,6 +64,8 @@ public class OrcidQueue implements ReloadableEntity { /** * A description of the resource to be synchronized. */ + @Lob + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -87,7 +89,7 @@ public class OrcidQueue implements ReloadableEntity { */ @Lob @Column(name = "metadata") - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") private String metadata; /** diff --git a/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java index 97605429d9cd..869dc452c7d8 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java +++ b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java @@ -14,9 +14,10 @@ import static org.apache.commons.collections.CollectionUtils.isNotEmpty; import java.sql.SQLException; -import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.UUID; import java.util.stream.Stream; @@ -30,6 +31,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.core.CrisConstants; +import org.dspace.core.exception.SQLRuntimeException; import org.dspace.event.Consumer; import org.dspace.event.Event; import org.dspace.orcid.OrcidHistory; @@ -73,7 +75,7 @@ public class 
OrcidQueueConsumer implements Consumer { private ConfigurationService configurationService; - private List alreadyConsumedItems = new ArrayList<>(); + private Set itemsToConsume = new HashSet<>(); @Override public void initialize() throws Exception { @@ -107,16 +109,26 @@ public void consume(Context context, Event event) throws Exception { return; } - if (alreadyConsumedItems.contains(item.getID())) { - return; - } + itemsToConsume.add(item.getID()); + } + + @Override + public void end(Context context) throws Exception { + + for (UUID itemId : itemsToConsume) { + + Item item = itemService.find(context, itemId); + + context.turnOffAuthorisationSystem(); + try { + consumeItem(context, item); + } finally { + context.restoreAuthSystemState(); + } - context.turnOffAuthorisationSystem(); - try { - consumeItem(context, item); - } finally { - context.restoreAuthSystemState(); } + + itemsToConsume.clear(); } private void consumeItem(Context context, Item item) throws SQLException { @@ -132,7 +144,7 @@ private void consumeItem(Context context, Item item) throws SQLException { consumeProfile(context, item); } - alreadyConsumedItems.add(item.getID()); + itemsToConsume.add(item.getID()); } @@ -162,6 +174,10 @@ private void consumeEntity(Context context, Item entity) throws SQLException { continue; } + if (isNotLatestVersion(context, entity)) { + continue; + } + orcidQueueService.create(context, relatedItem, entity); } @@ -297,6 +313,14 @@ private boolean isNotProfileItem(Item profileItemItem) { return !getProfileType().equals(itemService.getEntityTypeLabel(profileItemItem)); } + private boolean isNotLatestVersion(Context context, Item entity) { + try { + return !itemService.isLatestVersion(context, entity); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + private boolean isNestedMetadataPlaceholder(MetadataValue metadata) { return StringUtils.equals(metadata.getValue(), CrisConstants.PLACEHOLDER_PARENT_METADATA_VALUE); } @@ -322,11 +346,6 @@ private 
boolean isOrcidSynchronizationDisabled() { return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); } - @Override - public void end(Context context) throws Exception { - alreadyConsumedItems.clear(); - } - @Override public void finish(Context context) throws Exception { // nothing to do diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java index 235443b15033..b7e0b1ed2a85 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java @@ -74,6 +74,16 @@ public List findByProfileItemAndEntity(Context context, Item profile */ public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + /** + * Get the OrcidQueue records where the given item is the entity. + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidQueue entities + * @throws SQLException if database error + */ + public List findByEntity(Context context, Item item) throws SQLException; + /** * Find all the OrcidQueue records with the given entity and record type. 
* diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java index 2114b2535759..8e941b056535 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java @@ -63,6 +63,13 @@ public List findByProfileItemOrEntity(Context context, Item item) th return query.getResultList(); } + @Override + public List findByEntity(Context context, Item item) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE entity.id = :itemId"); + query.setParameter("itemId", item.getID()); + return query.getResultList(); + } + @Override public List findByEntityAndRecordType(Context context, Item entity, String type) throws SQLException { Query query = createQuery(context, "FROM OrcidQueue WHERE entity = :entity AND recordType = :type"); diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java index 2c272e620cca..98f63193a964 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java @@ -34,6 +34,8 @@ public abstract class AbstractOrcidProfileSectionFactory implements OrcidProfile protected final OrcidProfileSyncPreference preference; + protected Integer maxAllowedMetadataVisibility = 0; + @Autowired protected ItemService itemService; @@ -70,4 +72,17 @@ protected List getMetadataValues(Item item, String metadataField) return itemService.getMetadataByMetadataString(item, metadataField); } + protected boolean isAllowedMetadataByVisibility(MetadataValue metadataValue) { + return metadataValue.getSecurityLevel() == null + || 
metadataValue.getSecurityLevel() <= getMaxAllowedMetadataVisibility(); + } + + public Integer getMaxAllowedMetadataVisibility() { + return maxAllowedMetadataVisibility; + } + + public void setMaxAllowedMetadataVisibility(Integer maxAllowedMetadataVisibility) { + this.maxAllowedMetadataVisibility = maxAllowedMetadataVisibility; + } + } diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java index 89a1ca3d83e4..5b325a44a37a 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -56,9 +57,9 @@ public OrcidAffiliationFactory(OrcidProfileSectionType sectionType, OrcidProfile @Override public List getMetadataFields() { - return List.of(organizationField, roleField, startDateField, endDateField).stream() - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + return Stream.of(organizationField, roleField, startDateField, endDateField) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); } @Override @@ -80,20 +81,29 @@ public Object create(Context context, List metadataValues) { orcidCommonObjectFactory.createFuzzyDate(endDate).ifPresent(affiliation::setEndDate); affiliation.setRoleTitle(isUnprocessableValue(role) ? 
null : role.getValue()); - orcidCommonObjectFactory.createOrganization(context, organization).ifPresent(affiliation::setOrganization); + orcidCommonObjectFactory.createOrganization(context, organization).ifPresent(org -> { + affiliation.setOrganization(org); + affiliation.setDepartmentName(org.getName()); + }); return affiliation; } @Override public List getMetadataSignatures(Context context, Item item) { - List signatures = new ArrayList(); + List signatures = new ArrayList<>(); Map> metadataGroups = getMetadataGroups(item); int groupSize = metadataGroups.getOrDefault(organizationField, Collections.emptyList()).size(); for (int currentGroupIndex = 0; currentGroupIndex < groupSize; currentGroupIndex++) { List metadataValues = getMetadataValueByPlace(metadataGroups, currentGroupIndex); - signatures.add(metadataSignatureGenerator.generate(context, metadataValues)); + //only "visible" metadatavalues within this group + metadataValues = metadataValues.stream() + .filter(this::isAllowedMetadataByVisibility) + .collect(Collectors.toList()); + if (!metadataValues.isEmpty()) { + signatures.add(metadataSignatureGenerator.generate(context, metadataValues)); + } } return signatures; @@ -162,7 +172,7 @@ private Map> getMetadataGroups(Item item) { } private List getMetadataValueByPlace(Map> metadataGroups, int place) { - List metadataValues = new ArrayList(); + List metadataValues = new ArrayList<>(); for (String metadataField : metadataGroups.keySet()) { List nestedMetadataValues = metadataGroups.get(metadataField); if (nestedMetadataValues.size() > place) { diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java index 48cda3b3da20..8d92e72b80c0 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java @@ -121,7 +121,9 @@ public Optional createOrganization(Context context, MetadataValue Item organizationItem = findRelatedItem(context, metadataValue); if (organizationItem != null) { organization.setAddress(createOrganizationAddress(organizationItem)); - organization.setDisambiguatedOrganization(createDisambiguatedOrganization(organizationItem)); + organization.setDisambiguatedOrganization( + createDisambiguatedOrganization(context, organizationItem) + ); } return of(organization); @@ -156,7 +158,7 @@ public Optional createFundingContributor(Context context, Me FundingContributor contributor = new FundingContributor(); contributor.setCreditName(new CreditName(metadataValue.getValue())); - contributor.setContributorAttributes(getFundingContributorAttributes(metadataValue, role)); + contributor.setContributorAttributes(getFundingContributorAttributes(role)); Item authorItem = findItem(context, UUIDUtils.fromString(metadataValue.getAuthority())); if (authorItem != null) { @@ -190,7 +192,7 @@ public Optional createCountry(Context context, MetadataValue metadataVa throw new OrcidValidationException(OrcidValidationError.INVALID_COUNTRY); } - return country.map(isoCountry -> new Country(isoCountry)); + return country.map(Country::new); } private ContributorAttributes getContributorAttributes(MetadataValue metadataValue, ContributorRole role) { @@ -211,8 +213,7 @@ private OrganizationAddress createOrganizationAddress(Item organizationItem) { return address; } - private FundingContributorAttributes getFundingContributorAttributes(MetadataValue metadataValue, - FundingContributorRole role) { + private FundingContributorAttributes getFundingContributorAttributes(FundingContributorRole role) { FundingContributorAttributes attributes = new FundingContributorAttributes(); attributes.setContributorRole(role != null ? 
role.value() : null); return attributes; @@ -237,11 +238,23 @@ private DisambiguatedOrganization createDisambiguatedOrganization(Item organizat return null; } + private DisambiguatedOrganization createDisambiguatedOrganization(Context context, Item organizationItem) { + DisambiguatedOrganization disambiguatedOrganization = createDisambiguatedOrganization(organizationItem); + Item parentOrganization = findParentOrganization(context, organizationItem); + + while (disambiguatedOrganization == null && parentOrganization != null) { + disambiguatedOrganization = createDisambiguatedOrganization(parentOrganization); + parentOrganization = findParentOrganization(context, parentOrganization); + } + + return disambiguatedOrganization; + } + private Optional convertToIso3166Country(String countryValue) { return ofNullable(countryValue) .map(value -> countryConverter != null ? countryConverter.getValue(value) : value) .filter(value -> isValidEnum(Iso3166Country.class, value)) - .map(value -> Iso3166Country.fromValue(value)); + .map(Iso3166Country::fromValue); } private boolean isUnprocessableValue(MetadataValue value) { @@ -249,6 +262,19 @@ private boolean isUnprocessableValue(MetadataValue value) { || value.getValue().equals(PLACEHOLDER_PARENT_METADATA_VALUE); } + private Item findParentOrganization(Context context, Item item) { + try { + Optional metadataValue = + itemService.getMetadataByMetadataString(item, "organization.parentOrganization") + .stream().findFirst(); + return metadataValue.isPresent() + ? 
itemService.find(context, UUIDUtils.fromString(metadataValue.get().getAuthority())) + : null; + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + private Item findRelatedItem(Context context, MetadataValue metadataValue) { try { return itemService.find(context, UUIDUtils.fromString(metadataValue.getAuthority())); diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java index 4ddfbe47a328..28113e958332 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java @@ -76,6 +76,7 @@ public Object create(Context context, List metadataValues) { public List getMetadataSignatures(Context context, Item item) { return metadataFields.stream() .flatMap(metadataField -> getMetadataValues(item, metadataField).stream()) + .filter(metadataValue -> isAllowedMetadataByVisibility(metadataValue)) .map(metadataValue -> metadataSignatureGenerator.generate(context, List.of(metadataValue))) .collect(Collectors.toList()); } diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java index 1a657343c017..88a1033eca5f 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.orcid.script; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Script 
configuration for {@link OrcidBulkPush}. @@ -24,20 +19,8 @@ */ public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java index 8de25e9caf1e..b667088eabb4 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java @@ -164,6 +164,16 @@ public List findByProfileItemAndEntity(Context context, Item profile */ public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + /** + * Get the OrcidQueue records where the given item is the entity. + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByEntity(Context context, Item item) throws SQLException; + /** * Get all the OrcidQueue records with attempts less than the given attempts. 
* diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java index 98ab0c713a24..c0f70911b562 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java @@ -81,6 +81,11 @@ public List findByProfileItemOrEntity(Context context, Item item) th return orcidQueueDAO.findByProfileItemOrEntity(context, item); } + @Override + public List findByEntity(Context context, Item item) throws SQLException { + return orcidQueueDAO.findByEntity(context, item); + } + @Override public long countByProfileItemId(Context context, UUID profileItemId) throws SQLException { return orcidQueueDAO.countByProfileItemId(context, profileItemId); diff --git a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java index cec440df6d45..32915d74c0cf 100644 --- a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java @@ -314,17 +314,20 @@ private Item createProfileItem(Context context, EPerson ePerson, Collection coll item = installItemService.installItem(context, workspaceItem); + context.uncacheEntity(workspaceItem); + if (isNewProfileNotVisibleByDefault()) { Group anonymous = groupService.findByName(context, ANONYMOUS); authorizeService.removeGroupPolicies(context, item, anonymous); } - authorizeService.addPolicy(context, item, READ, ePerson); + itemService.addResourcePolicy(context, item, READ, ePerson); if (isAdditionOfWritePolicyOnProfileEnabled()) { - authorizeService.addPolicy(context, item, WRITE, ePerson); + itemService.addResourcePolicy(context, item, WRITE, ePerson); } + return reloadItem(context, item); } diff --git 
a/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java b/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java index 72ba03d99d27..0367556b8eab 100644 --- a/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java +++ b/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java @@ -136,6 +136,10 @@ public Model convert(Context context, DSpaceObject dso) List metadata_values = dsoService .getMetadata(dso, MetadataSchemaEnum.DC.getName(), Item.ANY, Item.ANY, Item.ANY); for (MetadataValue value : metadata_values) { + // skip empty values + if (value == null || StringUtils.isBlank(value.getValue())) { + continue; + } MetadataField metadataField = value.getMetadataField(); MetadataSchema metadataSchema = metadataField.getMetadataSchema(); String fieldname = metadataSchema.getName() + "." + metadataField.getElement(); diff --git a/dspace-api/src/main/java/org/dspace/script2externalservices/CreateWorkspaceItemWithExternalSource.java b/dspace-api/src/main/java/org/dspace/script2externalservices/CreateWorkspaceItemWithExternalSource.java index 2872ee19d125..7f8cf9e0772e 100644 --- a/dspace-api/src/main/java/org/dspace/script2externalservices/CreateWorkspaceItemWithExternalSource.java +++ b/dspace-api/src/main/java/org/dspace/script2externalservices/CreateWorkspaceItemWithExternalSource.java @@ -37,6 +37,7 @@ import org.dspace.content.MetadataValue; import org.dspace.content.WorkspaceItem; import org.dspace.content.dto.MetadataValueDTO; +import org.dspace.content.packager.PackageUtils; import org.dspace.core.Context; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverQuery.SORT_ORDER; @@ -287,6 +288,8 @@ private int fillWorkspaceItems(Context context, int record, LiveImportDataProvid if (!exist(dataObject.getMetadata())) { WorkspaceItem wsItem = externalDataService.createWorkspaceItemFromExternalDataObject(context, dataObject, this.collection); + Item 
itemFromWs = wsItem.getItem(); + PackageUtils.addDepositLicense(context, null, itemFromWs, wsItem.getCollection()); for (List metadataList : metadataValueToAdd(wsItem.getItem())) { addMetadata(wsItem.getItem(), metadataList); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index 2319aee31752..2ea0a52d6e34 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -18,6 +18,7 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; +import org.dspace.cli.DSpaceSkipUnknownArgumentsParser; import org.dspace.eperson.EPerson; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -36,6 +37,11 @@ public abstract class DSpaceRunnable implements R */ protected CommandLine commandLine; + /** + * The minimal CommandLine object for the script that'll hold help information + */ + protected CommandLine helpCommandLine; + /** * This EPerson identifier variable is the UUID of the EPerson that's running the script */ @@ -64,26 +70,66 @@ private void setHandler(DSpaceRunnableHandler dSpaceRunnableHandler) { * @param args The arguments given to the script * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran * @param currentUser + * @return the result of this step; StepResult.Continue: continue the normal process, + * initialize is successful; otherwise exit the process (the help or version is shown) * @throws ParseException If something goes wrong */ - public void initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, + public StepResult initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, EPerson currentUser) throws ParseException { if (currentUser != null) { 
this.setEpersonIdentifier(currentUser.getID()); } this.setHandler(dSpaceRunnableHandler); - this.parse(args); + + // parse the command line in a first step for the help options + // --> no other option is required + StepResult result = this.parseForHelp(args); + switch (result) { + case Exit: + // arguments of the command line matches the help options, handle this + handleHelpCommandLine(); + break; + + case Continue: + // arguments of the command line matches NOT the help options, parse the args for the normal options + result = this.parse(args); + break; + default: + break; + } + + return result; + } + + + /** + * This method handle the help command line. In this easy implementation only the help is printed. For more + * complexity override this method. + */ + private void handleHelpCommandLine() { + printHelp(); } + /** * This method will take the primitive array of String objects that represent the parameters given to the String * and it'll parse these into a CommandLine object that can be used by the script to retrieve the data * @param args The primitive array of Strings representing the parameters * @throws ParseException If something goes wrong */ - private void parse(String[] args) throws ParseException { + private StepResult parse(String[] args) throws ParseException { commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args); setup(); + return StepResult.Continue; + } + + private StepResult parseForHelp(String[] args) throws ParseException { + helpCommandLine = new DSpaceSkipUnknownArgumentsParser().parse(getScriptConfiguration().getHelpOptions(), args); + if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { + return StepResult.Exit; + } + + return StepResult.Continue; } /** @@ -158,4 +204,8 @@ public UUID getEpersonIdentifier() { public void setEpersonIdentifier(UUID epersonIdentifier) { this.epersonIdentifier = epersonIdentifier; } + + public enum StepResult { + Continue, Exit; + } } diff 
--git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index 6c521e2133db..049b7845da50 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -21,6 +21,7 @@ import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; +import javax.persistence.Lob; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; @@ -35,6 +36,7 @@ import org.dspace.core.ReloadableEntity; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.hibernate.annotations.Type; /** * This class is the DB Entity representation of the Process object to be stored in the Database @@ -68,6 +70,8 @@ public class Process implements ReloadableEntity { @Enumerated(EnumType.STRING) private ProcessStatus processStatus; + @Lob + @Type(type = "org.hibernate.type.TextType") @Column(name = "parameters") private String parameters; diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index c8a7812a5159..abb700cb10c9 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -37,7 +37,7 @@ public ScriptConfiguration getScriptConfiguration(String name) { @Override public List getScriptConfigurations(Context context) { return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( - scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)) + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null)) .sorted(Comparator.comparing(ScriptConfiguration::getName)) .collect(Collectors.toList()); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java 
b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 4b15c22f444a..5da7888ae11f 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -7,17 +7,29 @@ */ package org.dspace.scripts.configuration; +import java.sql.SQLException; +import java.util.List; + +import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceRunnable; import org.springframework.beans.factory.BeanNameAware; +import org.springframework.beans.factory.annotation.Autowired; /** * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this - * and represent a script's configuration + * and represent a script's configuration. + * By default script are available only to repository administrators script that have a broader audience + * must override the {@link #isAllowedToExecute(Context, List)} method. */ public abstract class ScriptConfiguration implements BeanNameAware { + @Autowired + protected AuthorizeService authorizeService; + /** * The possible options for this script */ @@ -70,14 +82,27 @@ public void setName(String name) { * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration */ public abstract void setDspaceRunnableClass(Class dspaceRunnableClass); + /** * This method will return if the script is allowed to execute in the given context. 
This is by default set * to the currentUser in the context being an admin, however this can be overwritten by each script individually * if different rules apply * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise * @return A boolean indicating whether the script is allowed to execute or not */ - public abstract boolean isAllowedToExecute(Context context); + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + public boolean isAllowedToExecute(Context context) { + return this.isAllowedToExecute(context, List.of()); + } /** * The getter for the options of the Script @@ -85,6 +110,19 @@ public void setName(String name) { */ public abstract Options getOptions(); + /** + * The getter for the options of the Script (help informations) + * + * @return the options value of this ScriptConfiguration for help + */ + public Options getHelpOptions() { + Options options = new Options(); + + options.addOption(Option.builder("h").longOpt("help").desc("help").hasArg(false).required(false).build()); + + return options; + } + @Override public void setBeanName(String beanName) { this.name = beanName; diff --git a/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java index 7f8a11e5ba13..40fea6cf54da 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java +++ b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java @@ -37,7 +37,7 @@ public class GeoIpService { public DatabaseReader getDatabaseReader() throws IllegalStateException { String dbPath = configurationService.getProperty("usage-statistics.dbfile"); if (StringUtils.isBlank(dbPath)) { - throw 
new IllegalStateException("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); + throw new IllegalStateException("The required 'dbfile' configuration is missing in usage-statistics.cfg!"); } try { diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 0ae1311e697f..204da303c770 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -20,9 +20,12 @@ import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; +import java.net.URI; import java.net.URLEncoder; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; import java.sql.SQLException; import java.text.DateFormat; import java.text.ParseException; @@ -180,6 +183,19 @@ protected SolrLoggerServiceImpl() { @Override public void afterPropertiesSet() throws Exception { + statisticsCoreURL = configurationService.getProperty("solr-statistics.server"); + + if (null != statisticsCoreURL) { + Path statisticsPath = Paths.get(new URI(statisticsCoreURL).getPath()); + statisticsCoreBase = statisticsPath + .getName(statisticsPath.getNameCount() - 1) + .toString(); + } else { + log.warn("Unable to find solr-statistics.server parameter in DSpace configuration. 
This is required for " + + "sharding statistics."); + statisticsCoreBase = null; + } + solr = solrStatisticsCore.getSolr(); // Read in the file so we don't have to do it all the time @@ -208,6 +224,13 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPer @Override public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser, Date time) { + postView(dspaceObject, request, currentUser, null, time); + } + + @Override + public void postView(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer, Date time) { + if (solr == null) { return; @@ -216,7 +239,7 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, try { - SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, request, currentUser, time); + SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, request, currentUser, referrer, time); if (doc1 == null) { return; } @@ -250,6 +273,12 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, @Override public void postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser) { + postView(dspaceObject, ip, userAgent, xforwardedfor, currentUser, null); + } + + @Override + public void postView(DSpaceObject dspaceObject, + String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer) { if (solr == null) { return; } @@ -257,7 +286,7 @@ public void postView(DSpaceObject dspaceObject, try { SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, ip, userAgent, xforwardedfor, - currentUser); + currentUser, referrer); if (doc1 == null) { return; } @@ -295,7 +324,7 @@ public void postLogin(DSpaceObject dspaceObject, HttpServletRequest request, EPe try { - SolrInputDocument document = getCommonSolrDoc(dspaceObject, request, currentUser, new Date()); + SolrInputDocument document = getCommonSolrDoc(dspaceObject, request, currentUser, null, new Date()); if 
(document == null) { return; @@ -331,7 +360,23 @@ public void postLogin(DSpaceObject dspaceObject, HttpServletRequest request, EPe * @throws SQLException in case of a database exception */ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServletRequest request, - EPerson currentUser, Date time) throws SQLException { + EPerson currentUser) throws SQLException { + return getCommonSolrDoc(dspaceObject, request, currentUser, null, null); + } + + /** + * Returns a solr input document containing common information about the statistics + * regardless if we are logging a search or a view of a DSpace object + * + * @param dspaceObject the object used. + * @param request the current request context. + * @param currentUser the current session's user. + * @param referrer the optional referrer. + * @return a solr input document + * @throws SQLException in case of a database exception + */ + protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer, Date time) throws SQLException { boolean isSpiderBot = request != null && SpiderDetector.isSpider(request); if (isSpiderBot && !configurationService.getBooleanProperty("usage-statistics.logBots", true)) { @@ -354,7 +399,9 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServ } //Also store the referrer - if (request.getHeader("referer") != null) { + if (referrer != null) { + doc1.addField("referrer", referrer); + } else if (request.getHeader("referer") != null) { doc1.addField("referrer", request.getHeader("referer")); } @@ -423,7 +470,8 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServ } protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String ip, String userAgent, - String xforwardedfor, EPerson currentUser) throws SQLException { + String xforwardedfor, EPerson currentUser, + String referrer) throws SQLException { boolean isSpiderBot = 
SpiderDetector.isSpider(ip); if (isSpiderBot && !configurationService.getBooleanProperty("usage-statistics.logBots", true)) { @@ -444,6 +492,11 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String i doc1.addField("ip", ip); } + // Add the referrer, if present + if (referrer != null) { + doc1.addField("referrer", referrer); + } + InetAddress ipAddress = null; try { String dns; @@ -513,7 +566,7 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String i public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EPerson currentUser, List queries, int rpp, String sortBy, String order, int page, DSpaceObject scope) { try { - SolrInputDocument solrDoc = getCommonSolrDoc(resultObject, request, currentUser, new Date()); + SolrInputDocument solrDoc = getCommonSolrDoc(resultObject, request, currentUser, null, new Date()); if (solrDoc == null) { return; } @@ -563,7 +616,7 @@ public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EP public void postWorkflow(UsageWorkflowEvent usageWorkflowEvent) throws SQLException { initSolrYearCores(); try { - SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null, new Date()); + SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null, null, new Date()); //Log the current collection & the scope ! solrDoc.addField("owningColl", usageWorkflowEvent.getScope().getID().toString()); @@ -1762,11 +1815,14 @@ protected synchronized void initSolrYearCores() { statisticYearCores .add(baseSolrUrl.replace("http://", "").replace("https://", "") + statCoreName); } - //Also add the core containing the current year ! 
- statisticYearCores.add(((HttpSolrClient) solr) + var baseCore = ((HttpSolrClient) solr) .getBaseURL() .replace("http://", "") - .replace("https://", "")); + .replace("https://", ""); + if (!statisticYearCores.contains(baseCore)) { + //Also add the core containing the current year, if it hasn't been added already + statisticYearCores.add(baseCore); + } } catch (IOException | SolrServerException e) { log.error(e.getMessage(), e); } diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java index e56bca36ad5b..cef8702bf41a 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java @@ -8,6 +8,7 @@ package org.dspace.statistics; import java.util.ArrayList; +import java.util.Date; import java.util.List; import org.apache.logging.log4j.LogManager; @@ -50,10 +51,11 @@ public void receiveEvent(Event event) { if (UsageEvent.Action.VIEW == ue.getAction()) { if (ue.getRequest() != null) { - solrLoggerService.postView(ue.getObject(), ue.getRequest(), currentUser); + solrLoggerService + .postView(ue.getObject(), ue.getRequest(), currentUser, ue.getReferrer(), new Date()); } else { solrLoggerService.postView(ue.getObject(), ue.getIp(), ue.getUserAgent(), ue.getXforwardedfor(), - currentUser); + currentUser, ue.getReferrer()); } } else if (UsageEvent.Action.SEARCH == ue.getAction()) { UsageSearchEvent usageSearchEvent = (UsageSearchEvent) ue; diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDatasetDisplay.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDatasetDisplay.java index 941d4a33d314..75e18ec49e1f 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDatasetDisplay.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDatasetDisplay.java 
@@ -278,9 +278,14 @@ public String composeFilterQuery(String startDate, String endDate, boolean relat } //Creates query for usage raport generator - public String composeQueryWithInverseRelation(DSpaceObject dSpaceObject, List default_queries ) { + public String composeQueryWithInverseRelation(DSpaceObject dSpaceObject, List default_queries, int type) { StringBuilder query = new StringBuilder(); - query.append("{!join from=search.resourceid to=id fromIndex="); + if (type == Constants.BITSTREAM) { + query.append("{!join from=search.resourceid to=owningItem fromIndex="); + } else { + query.append("{!join from=search.resourceid to=id fromIndex="); + } + query.append(configurationService.getProperty("solr.multicorePrefix")); query.append("search} "); boolean isFirstDefaultQuery = true; diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index dcae4aa4cbcd..7d1015c8e2ba 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.statistics.export; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script @@ -21,9 +16,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -41,15 +33,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { 
this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java index 8329e54308ed..d9d4f750a067 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java +++ b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java @@ -60,9 +60,15 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser, Date time); + void postView(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer, Date time); + public void postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser); + void postView(DSpaceObject dspaceObject, + String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer); + public void postLogin(DSpaceObject object, HttpServletRequest request, EPerson currentUser); public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EPerson currentUser, diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java index e45ce163ed77..319fe437d648 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java @@ -67,7 +67,6 @@ public static void main(String[] args) throws Exception { options.addOption("m", 
"mark-spiders", false, "Update isBot Flag in Solr"); options.addOption("f", "delete-spiders-by-flag", false, "Delete Spiders in Solr By isBot Flag"); options.addOption("i", "delete-spiders-by-ip", false, "Delete Spiders in Solr By IP Address"); - options.addOption("o", "optimize", false, "Run maintenance on the SOLR index"); options.addOption("b", "reindex-bitstreams", false, "Reindex the bitstreams to ensure we have the bundle name"); options.addOption("e", "export", false, "Export SOLR view statistics data to usage-statistics-intermediate-format"); @@ -93,8 +92,6 @@ public static void main(String[] args) throws Exception { solrLoggerService.deleteRobotsByIsBotFlag(); } else if (line.hasOption('i')) { solrLoggerService.deleteRobotsByIP(); - } else if (line.hasOption('o')) { - solrLoggerService.optimizeSOLR(); } else if (line.hasOption('b')) { solrLoggerService.reindexBitstreamHits(line.hasOption('r')); } else if (line.hasOption('e')) { diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java index 209c1e21e74d..5b367d7a8136 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java @@ -14,6 +14,8 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; @@ -153,22 +155,24 @@ protected boolean isLonger(String internalId, int endIndex) { * Retrieves a map of useful metadata about the File (size, checksum, modified) * * @param file The File to analyze - * @param attrs The map where we are storing values + * @param attrs The list of requested metadata values * @return Map of updated metadatas / attrs * @throws IOException */ - public Map about(File file, Map 
attrs) throws IOException { + public Map about(File file, List attrs) throws IOException { + + Map metadata = new HashMap(); + try { if (file != null && file.exists()) { - this.putValueIfExistsKey(attrs, SIZE_BYTES, file.length()); - if (attrs.containsKey(CHECKSUM)) { - attrs.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); - attrs.put(CHECKSUM_ALGORITHM, CSA); + this.putValueIfExistsKey(attrs, metadata, SIZE_BYTES, file.length()); + if (attrs.contains(CHECKSUM)) { + metadata.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); + metadata.put(CHECKSUM_ALGORITHM, CSA); } - this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(file.lastModified())); - return attrs; + this.putValueIfExistsKey(attrs, metadata, MODIFIED, String.valueOf(file.lastModified())); } - return null; + return metadata; } catch (Exception e) { log.error("about( FilePath: " + file.getAbsolutePath() + ", Map: " + attrs.toString() + ")", e); throw new IOException(e); @@ -204,13 +208,9 @@ private byte[] generateChecksumFrom(FileInputStream fis) throws IOException, NoS } } - protected void putValueIfExistsKey(Map attrs, String key, Object value) { - this.putEntryIfExistsKey(attrs, key, Map.entry(key, value)); - } - - protected void putEntryIfExistsKey(Map attrs, String key, Map.Entry entry) { - if (attrs.containsKey(key)) { - attrs.put(entry.getKey(), entry.getValue()); + protected void putValueIfExistsKey(List attrs, Map metadata, String key, Object value) { + if (attrs.contains(key)) { + metadata.put(key, value); } } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java index c91db9b1c3b2..2da5e84e8db0 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java @@ -9,6 +9,7 @@ import java.io.IOException; import java.io.InputStream; +import java.util.List; import 
java.util.Map; import org.dspace.content.Bitstream; @@ -62,13 +63,13 @@ public interface BitStoreService { * Obtain technical metadata about an asset in the asset store. * * @param bitstream The bitstream to describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException; + public Map about(Bitstream bitstream, List attrs) throws IOException; /** * Remove an asset from the asset store. diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java index d6d625ebbdf6..fcebbe676b80 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java @@ -166,12 +166,9 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, bitstream.setStoreNumber(assetstore); bitstreamService.update(context, bitstream); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); + List wantedMetadata = List.of("size_bytes", "checksum", "checksum_algorithm"); + Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); - Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); if (MapUtils.isEmpty(receivedMetadata)) { String message = "Not able to register bitstream:" + bitstream.getID() + " at path: " + bitstreamPath; log.error(message); @@ -201,13 +198,8 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, } @Override - public 
Map computeChecksum(Context context, Bitstream bitstream) throws IOException { - Map wantedMetadata = new HashMap(); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); - - Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); - return receivedMetadata; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException { + return this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("checksum", "checksum_algorithm")); } @Override @@ -252,10 +244,9 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio for (Bitstream bitstream : storage) { UUID bid = bitstream.getID(); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("modified", null); - Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + List wantedMetadata = List.of("size_bytes", "modified"); + Map receivedMetadata = this.getStore(bitstream.getStoreNumber()) + .about(bitstream, wantedMetadata); // Make sure entries which do not exist are removed @@ -320,7 +311,10 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio context.uncacheEntity(bitstream); } - context.dispatchEvents(); + // Commit actual changes to DB after dispatch events + System.out.print("Performing incremental commit to the database..."); + context.commit(); + System.out.println(" Incremental commit done!"); cleanedBitstreamCount = cleanedBitstreamCount + storage.size(); @@ -350,13 +344,11 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio @Nullable @Override public Long getLastModified(Bitstream bitstream) throws IOException { - Map attrs = new HashMap(); - attrs.put("modified", null); - attrs = this.getStore(bitstream.getStoreNumber()).about(bitstream, attrs); - if (attrs == null || !attrs.containsKey("modified")) { + Map metadata = 
this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("modified")); + if (metadata == null || !metadata.containsKey("modified")) { return null; } - return Long.valueOf(attrs.get("modified").toString()); + return Long.valueOf(metadata.get("modified").toString()); } /** diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java index 468d22ca738d..2fa9a9dbd5f6 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java @@ -15,6 +15,7 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.List; import java.util.Map; import org.apache.logging.log4j.Logger; @@ -126,13 +127,13 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { /** * Obtain technical metadata about an asset in the asset store. 
* - * @param bitstream The asset to describe - * @param attrs A Map whose keys consist of desired metadata fields - * @return attrs - * A Map with key/value pairs of desired metadata - * @throws java.io.IOException If a problem occurs while obtaining metadata + * @param bitstream The asset to describe + * @param attrs A List of desired metadata fields + * @return attrs A Map with key/value pairs of desired metadata + * @throws java.io.IOException If a problem occurs while obtaining + * metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { try { // potentially expensive, since it may calculate the checksum File file = getFile(bitstream); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index d6056028c7f3..21840f453112 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -7,6 +7,8 @@ */ package org.dspace.storage.bitstore; +import static java.lang.String.valueOf; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -14,6 +16,8 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Supplier; @@ -26,7 +30,6 @@ import com.amazonaws.regions.Region; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.GetObjectRequest; @@ -41,8 +44,8 @@ import org.apache.commons.cli.Option; import 
org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.IOUtils; +import org.apache.commons.io.output.NullOutputStream; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; @@ -151,9 +154,8 @@ public S3BitStoreService() {} * * @param s3Service AmazonS3 service */ - protected S3BitStoreService(AmazonS3 s3Service, TransferManager tm) { + protected S3BitStoreService(AmazonS3 s3Service) { this.s3Service = s3Service; - this.tm = tm; } @Override @@ -213,7 +215,7 @@ public void init() throws IOException { } try { - if (!s3Service.doesBucketExist(bucketName)) { + if (!s3Service.doesBucketExistV2(bucketName)) { s3Service.createBucket(bucketName); log.info("Creating new S3 Bucket: " + bucketName); } @@ -299,10 +301,6 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { ) { Utils.bufferedCopy(dis, fos); in.close(); - byte[] md5Digest = dis.getMessageDigest().digest(); - String md5Base64 = Base64.encodeBase64String(md5Digest); - ObjectMetadata objMetadata = new ObjectMetadata(); - objMetadata.setContentMD5(md5Base64); Upload upload = tm.upload(bucketName, key, scratchFile); @@ -311,7 +309,7 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { bitstream.setSizeBytes(scratchFile.length()); // we cannot use the S3 ETAG here as it could be not a MD5 in case of multipart upload (large files) or if // the bucket is encrypted - bitstream.setChecksum(Utils.toHex(md5Digest)); + bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); bitstream.setChecksumAlgorithm(CSA); } catch (AmazonClientException | IOException | InterruptedException e) { @@ -334,86 +332,56 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { * (Does not use getContentMD5, as that is 128-bit MD5 digest calculated on caller's side) * * @param bitstream The asset to 
describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ @Override - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { + String key = getFullKey(bitstream.getInternalId()); // If this is a registered bitstream, strip the -R prefix before retrieving if (isRegisteredBitstream(key)) { key = key.substring(REGISTERED_FLAG.length()); } + + Map metadata = new HashMap<>(); + try { + ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key); if (objectMetadata != null) { - if (attrs.containsKey("size_bytes")) { - attrs.put("size_bytes", objectMetadata.getContentLength()); - } - if (attrs.containsKey("modified")) { - attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime())); - } + putValueIfExistsKey(attrs, metadata, "size_bytes", objectMetadata.getContentLength()); + putValueIfExistsKey(attrs, metadata, "modified", valueOf(objectMetadata.getLastModified().getTime())); } - try ( - InputStream in = get(bitstream); - // Read through a digest input stream that will work out the MD5 - DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); - ) { - in.close(); - byte[] md5Digest = dis.getMessageDigest().digest(); - String md5Base64 = Base64.encodeBase64String(md5Digest); - attrs.put("checksum", md5Base64); - attrs.put("checksum_algorithm", CSA); - } catch (NoSuchAlgorithmException nsae) { - // Should never happen - log.warn("Caught NoSuchAlgorithmException", nsae); + + putValueIfExistsKey(attrs, metadata, "checksum_algorithm", CSA); + + if (attrs.contains("checksum")) { + try (InputStream in = get(bitstream); + DigestInputStream dis = new DigestInputStream(in, 
MessageDigest.getInstance(CSA)) + ) { + Utils.copy(dis, NullOutputStream.NULL_OUTPUT_STREAM); + byte[] md5Digest = dis.getMessageDigest().digest(); + metadata.put("checksum", Utils.toHex(md5Digest)); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); + } } - return attrs; + + return metadata; } catch (AmazonS3Exception e) { if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) { - return null; + return metadata; } } catch (AmazonClientException e) { log.error("about(" + key + ", attrs)", e); throw new IOException(e); } - return null; - } - - private boolean isMD5Checksum(String eTag) { - // if the etag is NOT an MD5 it end with -x where x is the number of part used in the multipart upload - return StringUtils.contains(eTag, "-"); - } - - /** - * Populates map values by checking key existence - *
      - * Adds technical metadata about an asset in the asset store, like: - *

        - *
      • size_bytes
      • - *
      • checksum
      • - *
      • checksum_algorithm
      • - *
      • modified
      • - *
      - * - * @param objectMetadata containing technical data - * @param attrs map with keys populated - * @return Map of enriched attrs with values - */ - public Map about(ObjectMetadata objectMetadata, Map attrs) { - if (objectMetadata != null) { - this.putValueIfExistsKey(attrs, SIZE_BYTES, objectMetadata.getContentLength()); - - // put CHECKSUM_ALGORITHM if exists CHECKSUM - this.putValueIfExistsKey(attrs, CHECKSUM, objectMetadata.getETag()); - this.putEntryIfExistsKey(attrs, CHECKSUM, Map.entry(CHECKSUM_ALGORITHM, CSA)); - - this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(objectMetadata.getLastModified().getTime())); - } - return attrs; + return metadata; } /** @@ -577,13 +545,14 @@ public static void main(String[] args) throws Exception { String accessKey = command.getOptionValue("a"); String secretKey = command.getOptionValue("s"); - String assetFile = command.getOptionValue("f"); S3BitStoreService store = new S3BitStoreService(); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); - store.s3Service = new AmazonS3Client(awsCredentials); + store.s3Service = AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(awsCredentials)) + .build(); //Todo configurable region Region usEast1 = Region.getRegion(Regions.US_EAST_1); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java index b979811be501..fd41b2486966 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java @@ -102,7 +102,7 @@ public interface BitstreamStorageService { public UUID register(Context context, Bitstream bitstream, int assetstore, String bitstreamPath) throws SQLException, IOException, AuthorizeException; - public Map computeChecksum(Context context, 
Bitstream bitstream) throws IOException; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException; /** * Does the internal_id column in the bitstream row indicate the bitstream diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index ce5b0f8d0a4c..fd33b111a1b9 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -73,7 +73,6 @@ public class DatabaseUtils { // Types of databases supported by DSpace. See getDbType() public static final String DBMS_POSTGRES = "postgres"; - public static final String DBMS_ORACLE = "oracle"; public static final String DBMS_H2 = "h2"; // Name of the table that Flyway uses for its migration history @@ -323,9 +322,7 @@ public static void main(String[] argv) { System.out .println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n"); System.out.println("There is NO turning back from this action. 
Backup your DB before continuing."); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("\nORACLE WARNING: your RECYCLEBIN will also be PURGED.\n"); - } else if (dbType.equals(DBMS_POSTGRES)) { + if (dbType.equals(DBMS_POSTGRES)) { System.out.println( "\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped if it" + " is in the same schema as the DSpace database.\n"); @@ -413,11 +410,10 @@ private static void printDBInfo(Connection connection) throws SQLException { DatabaseMetaData meta = connection.getMetaData(); String dbType = getDbType(connection); System.out.println("\nDatabase Type: " + dbType); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("===================================="); - System.out.println("WARNING: Oracle support is deprecated!"); - System.out.println("See https://github.com/DSpace/DSpace/issues/8214"); - System.out.println("====================================="); + if (!dbType.equals(DBMS_POSTGRES) && !dbType.equals(DBMS_H2)) { + System.err.println("===================================="); + System.err.println("ERROR: Database type " + dbType + " is UNSUPPORTED!"); + System.err.println("====================================="); } System.out.println("Database URL: " + meta.getURL()); System.out.println("Database Schema: " + getSchemaName(connection)); @@ -552,10 +548,6 @@ private synchronized static FluentConfiguration setupFlyway(DataSource datasourc String dbType = getDbType(connection); connection.close(); - if (dbType.equals(DBMS_ORACLE)) { - log.warn("ORACLE SUPPORT IS DEPRECATED! See https://github.com/DSpace/DSpace/issues/8214"); - } - // Determine location(s) where Flyway will load all DB migrations ArrayList scriptLocations = new ArrayList<>(); @@ -791,26 +783,6 @@ private static synchronized void cleanDatabase(Flyway flyway, DataSource dataSou // First, run Flyway's clean command on database. 
// For MOST database types, this takes care of everything flyway.clean(); - - try (Connection connection = dataSource.getConnection()) { - // Get info about which database type we are using - String dbType = getDbType(connection); - - // If this is Oracle, the only way to entirely clean the database - // is to also purge the "Recyclebin". See: - // http://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_9018.htm - if (dbType.equals(DBMS_ORACLE)) { - PreparedStatement statement = null; - try { - statement = connection.prepareStatement("PURGE RECYCLEBIN"); - statement.executeQuery(); - } finally { - if (statement != null && !statement.isClosed()) { - statement.close(); - } - } - } - } } catch (FlywayException fe) { // If any FlywayException (Runtime) is thrown, change it to a SQLException throw new SQLException("Flyway clean error occurred", fe); @@ -1059,11 +1031,6 @@ public static boolean sequenceExists(Connection connection, String sequenceName) // We need to filter by schema in PostgreSQL schemaFilter = true; break; - case DBMS_ORACLE: - // Oracle specific query for a sequence owned by our current DSpace user - // NOTE: No need to filter by schema for Oracle, as Schema = User - sequenceSQL = "SELECT COUNT(1) FROM user_sequences WHERE sequence_name=?"; - break; case DBMS_H2: // In H2, sequences are listed in the "information_schema.sequences" table // SEE: http://www.h2database.com/html/grammar.html#information_schema @@ -1167,11 +1134,6 @@ public static String getSchemaName(Connection connection) // For PostgreSQL, the default schema is named "public" // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html schema = "public"; - } else if (dbType.equals(DBMS_ORACLE)) { - // For Oracle, default schema is actually the user account - // See: http://stackoverflow.com/a/13341390 - DatabaseMetaData meta = connection.getMetaData(); - schema = meta.getUserName(); } else { // For H2 (in memory), there is no such thing as a schema schema = null; @@ 
-1320,6 +1282,7 @@ public void run() { Context context = null; try { context = new Context(); + context.setMode(Context.Mode.READ_ONLY); context.turnOffAuthorisationSystem(); log.info( "Post database migration, reindexing all content in Discovery search and browse engine"); @@ -1369,8 +1332,6 @@ public static String getDbType(Connection connection) String dbms_lc = prodName.toLowerCase(Locale.ROOT); if (dbms_lc.contains("postgresql")) { return DBMS_POSTGRES; - } else if (dbms_lc.contains("oracle")) { - return DBMS_ORACLE; } else if (dbms_lc.contains("h2")) { // Used for unit testing only return DBMS_H2; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java index 7debf3ba449b..d6577dc7e19d 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java @@ -7,9 +7,9 @@ */ package org.dspace.storage.rdbms; -import java.io.File; import java.io.IOException; import java.sql.SQLException; +import java.util.List; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.xpath.XPathExpressionException; @@ -20,8 +20,6 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.content.NonUniqueMetadataException; import org.dspace.core.Context; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import org.flywaydb.core.api.callback.Callback; import org.flywaydb.core.api.callback.Event; import org.slf4j.Logger; @@ -58,30 +56,31 @@ public class RegistryUpdater implements Callback { * Method to actually update our registries from latest configuration files. 
*/ private void updateRegistries() { - ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); Context context = null; try { context = new Context(); context.turnOffAuthorisationSystem(); - String base = config.getProperty("dspace.dir") - + File.separator + "config" + File.separator - + "registries" + File.separator; - - // Load updates to Bitstream format registry (if any) - log.info("Updating Bitstream Format Registry based on {}bitstream-formats.xml", base); - RegistryLoader.loadBitstreamFormats(context, base + "bitstream-formats.xml"); + // Load updates to Bitstream formats registries (if any) + List registryBitstreamFormatFiles = + MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_BITSTREAM_FORMAT_PROPERTY); + for (String bitstreamFormat : registryBitstreamFormatFiles) { + log.info("Updating Bitstream Format Registry based on {}", bitstreamFormat); + RegistryLoader.loadBitstreamFormats(context, bitstreamFormat); + } // Load updates to Metadata schema registries (if any) - log.info("Updating Metadata Registries based on metadata type configs in {}", base); - for (String namespaceFile: config.getArrayProperty("registry.metadata.load")) { - log.info("Reading {}", namespaceFile); - MetadataImporter.loadRegistry(base + namespaceFile, true); + List registryMetadataFiles = + MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_METADATA_PROPERTY); + log.info("Updating Metadata Registries based on metadata type configs in {}", MetadataImporter.BASE); + for (String metadataFile : registryMetadataFiles) { + log.info("Reading {}", metadataFile); + MetadataImporter.loadRegistry(metadataFile, true); } String workflowTypes = "workflow-types.xml"; log.info("Reading {}", workflowTypes); - MetadataImporter.loadRegistry(base + workflowTypes, true); + MetadataImporter.loadRegistry( MetadataImporter.BASE + workflowTypes, true); context.restoreAuthSystemState(); // Commit changes and close context diff --git 
a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index 842fc15e1657..f0c4e4e17990 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -78,13 +78,6 @@ protected static Integer dropDBConstraint(Connection connection, String tableNam constraintName += "_" + StringUtils.lowerCase(constraintSuffix); cascade = true; break; - case "oracle": - // In Oracle, constraints are listed in the USER_CONS_COLUMNS table - constraintNameSQL = "SELECT CONSTRAINT_NAME " + - "FROM USER_CONS_COLUMNS " + - "WHERE TABLE_NAME = ? AND COLUMN_NAME = ?"; - cascade = true; - break; case "h2": // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + @@ -160,9 +153,6 @@ protected static Integer dropDBTable(Connection connection, String tableName) case "postgresql": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; - case "oracle": - dropTableSQL = "DROP TABLE " + tableName + " CASCADE CONSTRAINTS"; - break; case "h2": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; @@ -208,9 +198,6 @@ protected static Integer dropDBSequence(Connection connection, String sequenceNa case "postgresql": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; - case "oracle": - dropSequenceSQL = "DROP SEQUENCE " + sequenceName; - break; case "h2": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; @@ -256,9 +243,6 @@ protected static Integer dropDBView(Connection connection, String viewName) case "postgresql": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; - case "oracle": - dropViewSQL = "DROP VIEW " + viewName + " CASCADE CONSTRAINTS"; - break; case "h2": dropViewSQL = "DROP VIEW IF EXISTS " 
+ viewName + " CASCADE"; break; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java index 56c5b474d9fc..758e745ddc86 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java @@ -19,10 +19,9 @@ * of the "community" table. This is necessary for the upgrade from 1.3 to 1.4 *

      * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

      * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java index 6d82055e530e..37100a17f926 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java @@ -19,10 +19,9 @@ * from 1.5 to 1.6 *

      * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

      * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java index ea72d99b6e29..8e2be91127c8 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java @@ -20,10 +20,9 @@ * this column must be renamed to "resource_id". *

      * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

      * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java index b3306a9fc93c..0361e6805356 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java @@ -67,8 +67,6 @@ public void migrate(Context context) String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java index 9aa0f4877c39..4c1cf3365395 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java @@ -46,8 +46,6 @@ public void migrate(Context context) throws Exception { String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/submit/consumer/SubmissionConfigConsumer.java 
b/dspace-api/src/main/java/org/dspace/submit/consumer/SubmissionConfigConsumer.java new file mode 100644 index 000000000000..a593fe8ae066 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/consumer/SubmissionConfigConsumer.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.consumer; + +import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.discovery.IndexingService; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.event.Consumer; +import org.dspace.event.Event; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.factory.SubmissionServiceFactory; + +/** + * Consumer implementation to be used for Item Submission Configuration + * + * @author paulo.graca at fccn.pt + */ +public class SubmissionConfigConsumer implements Consumer { + /** + * log4j logger + */ + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfigConsumer.class); + + IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), + IndexingService.class); + + @Override + public void initialize() throws Exception { + // No-op + } + + @Override + public void consume(Context ctx, Event event) throws Exception { + int st = event.getSubjectType(); + int et = event.getEventType(); + + + if ( st == Constants.COLLECTION ) { + switch (et) { + case Event.MODIFY_METADATA: + // Submission configuration it's based on solr + // for collection's entity type but, at this point + // that info isn't indexed yet, we need to force it + DSpaceObject subject = 
event.getSubject(ctx); + Collection collectionFromDSOSubject = (Collection) subject; + indexer.indexContent(ctx, new IndexableCollection (collectionFromDSOSubject), true, false, false); + indexer.commit(); + + log.debug("SubmissionConfigConsumer occured: " + event.toString()); + // reload submission configurations + SubmissionServiceFactory.getInstance().getSubmissionConfigService().reload(); + break; + + default: + log.debug("SubmissionConfigConsumer occured: " + event.toString()); + // reload submission configurations + SubmissionServiceFactory.getInstance().getSubmissionConfigService().reload(); + break; + } + } + } + + @Override + public void end(Context ctx) throws Exception { + // No-op + } + + @Override + public void finish(Context ctx) throws Exception { + // No-op + } + +} diff --git a/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactory.java b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactory.java new file mode 100644 index 000000000000..6020f13b46cc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactory.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.factory; + +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.service.SubmissionConfigService; + +/** + * Abstract factory to get services for submission, use SubmissionServiceFactory.getInstance() to retrieve an + * implementation + * + * @author paulo.graca at fccn.pt + */ +public abstract class SubmissionServiceFactory { + + public abstract SubmissionConfigService getSubmissionConfigService() throws SubmissionConfigReaderException; + + public static SubmissionServiceFactory getInstance() { + return 
DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("submissionServiceFactory", SubmissionServiceFactory.class); + } +} diff --git a/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactoryImpl.java new file mode 100644 index 000000000000..19f050859769 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactoryImpl.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.factory; + +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.submit.service.SubmissionConfigService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for submission, use SubmissionServiceFactory.getInstance() to + * retrieve an implementation + * + * @author paulo.graca at fccn.pt + */ +public class SubmissionServiceFactoryImpl extends SubmissionServiceFactory { + @Autowired(required = true) + private SubmissionConfigService submissionConfigService; + + @Override + public SubmissionConfigService getSubmissionConfigService() throws SubmissionConfigReaderException { + return submissionConfigService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java index 41b15ddd7a5a..894d3491a181 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java @@ -7,13 +7,8 @@ */ package 
org.dspace.submit.migration; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script @@ -23,9 +18,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java index af3574da699e..6d9f3198fe26 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java @@ -7,7 +7,12 @@ */ package org.dspace.submit.migration; +import java.util.List; + +import org.apache.commons.cli.Options; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; /** * Subclass of {@link SubmissionFormsMigrationCliScriptConfiguration} to be use in rest/scripts.xml configuration so @@ -15,10 +20,37 @@ * * @author Maria Verdonck (Atmire) on 
05/01/2021 */ -public class SubmissionFormsMigrationScriptConfiguration extends SubmissionFormsMigrationCliScriptConfiguration { +public class SubmissionFormsMigrationScriptConfiguration + extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); + options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { // Script is not allowed to be executed from REST side return false; } diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java index dbbb7bbc5e4d..e5cd86f50458 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java @@ -11,6 +11,8 @@ import java.util.Date; import java.util.Objects; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; @@ -21,6 +23,7 @@ import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import 
org.springframework.beans.factory.annotation.Autowired; /** @@ -28,9 +31,8 @@ * set permission on a file. An option is defined by a name such as "open * access", "embargo", "restricted access", etc. and some optional attributes to * better clarify the constraints and input available to the user. For instance - * an embargo option could allow to set a start date not longer than 3 years, - * etc - * + * an embargo option could allow to set a start date not longer than 3 years. + * * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) */ public class AccessConditionOption { @@ -44,9 +46,9 @@ public class AccessConditionOption { @Autowired private ResourcePolicyService resourcePolicyService; - DateMathParser dateMathParser = new DateMathParser(); + private static final Logger LOG = LogManager.getLogger(); - /** An unique name identifying the access contion option **/ + /** A unique name identifying the access condition option. **/ private String name; /** @@ -147,6 +149,9 @@ public void setEndDateLimit(String endDateLimit) { * startDate should be null. Otherwise startDate may not be null. * @param endDate end date of the resource policy. If {@link #getHasEndDate()} returns false, * endDate should be null. Otherwise endDate may not be null. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + * @throws ParseException passed through (indicates problem with a date). 
*/ public void createResourcePolicy(Context context, DSpaceObject obj, String name, String description, Date startDate, Date endDate) @@ -160,7 +165,7 @@ public void createResourcePolicy(Context context, DSpaceObject obj, String name, /** * Validate ResourcePolicy and after update it - * + * * @param context DSpace context * @param resourcePolicy ResourcePolicy to update * @throws SQLException If database error @@ -175,17 +180,25 @@ public void updateResourcePolicy(Context context, ResourcePolicy resourcePolicy) } /** - * Validate the policy properties, throws exceptions if any is not valid - * - * @param context DSpace context - * @param name Name of the resource policy - * @param startDate Start date of the resource policy. If {@link #getHasStartDate()} - * returns false, startDate should be null. Otherwise startDate may not be null. - * @param endDate End date of the resource policy. If {@link #getHasEndDate()} - * returns false, endDate should be null. Otherwise endDate may not be null. + * Validate the policy properties, throws exceptions if any is not valid. + * + * @param context DSpace context. + * @param name Name of the resource policy. + * @param startDate Start date of the resource policy. If + * {@link #getHasStartDate()} returns false, startDate + * should be null. Otherwise startDate may not be null. + * @param endDate End date of the resource policy. If + * {@link #getHasEndDate()} returns false, endDate should + * be null. Otherwise endDate may not be null. + * @throws IllegalStateException if a date is required and absent, + * a date is not required and present, or a date exceeds its + * configured maximum. + * @throws ParseException passed through. 
*/ - private void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) - throws SQLException, AuthorizeException, ParseException { + public void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) + throws IllegalStateException, ParseException { + LOG.debug("Validate policy dates: name '{}', startDate {}, endDate {}", + name, startDate, endDate); if (getHasStartDate() && Objects.isNull(startDate)) { throw new IllegalStateException("The access condition " + getName() + " requires a start date."); } @@ -199,29 +212,33 @@ private void validateResourcePolicy(Context context, String name, Date startDate throw new IllegalStateException("The access condition " + getName() + " cannot contain an end date."); } + DateMathParser dateMathParser = new DateMathParser(); + Date latestStartDate = null; if (Objects.nonNull(getStartDateLimit())) { - latestStartDate = dateMathParser.parseMath(getStartDateLimit()); + latestStartDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getStartDateLimit())); } Date latestEndDate = null; if (Objects.nonNull(getEndDateLimit())) { - latestEndDate = dateMathParser.parseMath(getEndDateLimit()); + latestEndDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getEndDateLimit())); } + LOG.debug(" latestStartDate {}, latestEndDate {}", + latestStartDate, latestEndDate); // throw if startDate after latestStartDate if (Objects.nonNull(startDate) && Objects.nonNull(latestStartDate) && startDate.after(latestStartDate)) { throw new IllegalStateException(String.format( - "The start date of access condition %s should be earlier than %s from now.", - getName(), getStartDateLimit() + "The start date of access condition %s should be earlier than %s from now (%s).", + getName(), getStartDateLimit(), dateMathParser.getNow() )); } // throw if endDate after latestEndDate if (Objects.nonNull(endDate) && Objects.nonNull(latestEndDate) && endDate.after(latestEndDate)) { throw new 
IllegalStateException(String.format( - "The end date of access condition %s should be earlier than %s from now.", - getName(), getEndDateLimit() + "The end date of access condition %s should be earlier than %s from now (%s).", + getName(), getEndDateLimit(), dateMathParser.getNow() )); } } diff --git a/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigService.java b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigService.java new file mode 100644 index 000000000000..66f778947ab5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigService.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.service; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.app.util.SubmissionConfig; +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.content.Collection; +import org.dspace.core.Context; + +/** + * Item Submission Configuration Service + * enables interaction with a submission config like + * getting a config by a collection name or handle + * as also retrieving submission configuration steps + * + * @author paulo.graca at fccn.pt + */ +public interface SubmissionConfigService { + + public void reload() throws SubmissionConfigReaderException; + + public String getDefaultSubmissionConfigName(); + + public List getAllSubmissionConfigs(Integer limit, Integer offset); + + public int countSubmissionConfigs(); + + public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle); + + public SubmissionConfig getSubmissionConfigByCollection(Collection collection); + + public SubmissionConfig getSubmissionConfigByName(String submitName); + + public SubmissionStepConfig 
getStepConfig(String stepID) + throws SubmissionConfigReaderException; + + public List getCollectionsBySubmissionConfig(Context context, String submitName) + throws IllegalStateException, SQLException, SubmissionConfigReaderException; + + SubmissionConfig getCorrectionSubmissionConfigByCollection(Collection collection); +} diff --git a/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigServiceImpl.java b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigServiceImpl.java new file mode 100644 index 000000000000..b76725107d44 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigServiceImpl.java @@ -0,0 +1,89 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.service; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.app.util.SubmissionConfig; +import org.dspace.app.util.SubmissionConfigReader; +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.content.Collection; +import org.dspace.core.Context; +import org.springframework.beans.factory.InitializingBean; + +/** + * An implementation for Submission Config service + * + * @author paulo.graca at fccn.pt + */ +public class SubmissionConfigServiceImpl implements SubmissionConfigService, InitializingBean { + + protected SubmissionConfigReader submissionConfigReader; + + public SubmissionConfigServiceImpl () throws SubmissionConfigReaderException { + submissionConfigReader = new SubmissionConfigReader(); + } + + @Override + public void afterPropertiesSet() throws Exception { + submissionConfigReader.reload(); + } + + @Override + public void reload() throws SubmissionConfigReaderException { + submissionConfigReader.reload(); + } + + @Override + public 
String getDefaultSubmissionConfigName() { + return submissionConfigReader.getDefaultSubmissionConfigName(); + } + + @Override + public List getAllSubmissionConfigs(Integer limit, Integer offset) { + return submissionConfigReader.getAllSubmissionConfigs(limit, offset); + } + + @Override + public int countSubmissionConfigs() { + return submissionConfigReader.countSubmissionConfigs(); + } + + @Override + public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle) { + return submissionConfigReader.getSubmissionConfigByCollection(collectionHandle); + } + + @Override + public SubmissionConfig getSubmissionConfigByCollection(Collection collection) { + return submissionConfigReader.getSubmissionConfigByCollection(collection); + } + + @Override + public SubmissionConfig getSubmissionConfigByName(String submitName) { + return submissionConfigReader.getSubmissionConfigByName(submitName); + } + + @Override + public SubmissionStepConfig getStepConfig(String stepID) throws SubmissionConfigReaderException { + return submissionConfigReader.getStepConfig(stepID); + } + + @Override + public List getCollectionsBySubmissionConfig(Context context, String submitName) + throws IllegalStateException, SQLException { + return submissionConfigReader.getCollectionsBySubmissionConfig(context, submitName); + } + + public SubmissionConfig getCorrectionSubmissionConfigByCollection(Collection collection) { + return submissionConfigReader.getCorrectionSubmissionConfigByCollection(collection); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java index a913f2504a50..65f1ae9dcf9b 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java @@ -11,53 +11,61 @@ import static org.apache.commons.lang.StringUtils.EMPTY; import java.io.ByteArrayOutputStream; -import 
java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; -import java.util.Optional; +import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.content.Item; import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; -import org.dspace.discovery.IndexableObject; import org.dspace.eperson.EPerson; -import org.dspace.subscriptions.service.SubscriptionGenerator; -import org.springframework.beans.factory.annotation.Autowired; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + /** * Implementation class of SubscriptionGenerator * which will handle the logic of sending the emails * in case of 'content' subscriptionType */ -@SuppressWarnings("rawtypes") -public class ContentGenerator implements SubscriptionGenerator { +public class ContentGenerator { private final Logger log = LogManager.getLogger(ContentGenerator.class); + private final ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); - @SuppressWarnings("unchecked") - private Map entityType2Disseminator = new HashMap(); - - @Autowired - private ItemService itemService; + private Map entityType2Disseminator; - @Override - public void notifyForSubscriptions(Context context, EPerson ePerson, - List indexableComm, - List indexableColl) { + public void notifyForSubscriptions(EPerson ePerson, + List indexableComm, + List indexableColl, + Map> indexableEntityByType) { try { if (Objects.nonNull(ePerson)) { Locale supportedLocale = I18nUtil.getEPersonLocale(ePerson); Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "subscriptions_content")); 
email.addRecipient(ePerson.getEmail()); - email.addArgument(generateBodyMail(context, indexableComm)); - email.addArgument(generateBodyMail(context, indexableColl)); + + String bodyCommunities = generateBodyMail("Community", indexableComm); + String bodyCollections = generateBodyMail("Collection", indexableColl); + if (bodyCommunities.equals(EMPTY) && bodyCollections.equals(EMPTY)) { + log.debug("subscription(s) of eperson {} do(es) not match any new items: nothing to send" + + " - exit silently", ePerson::getID); + return; + } + email.addArgument(configurationService.getProperty("subscription.url")); + email.addArgument(bodyCommunities); + email.addArgument(bodyCollections); + email.addArgument( + indexableEntityByType.entrySet().stream() + .map(entry -> generateBodyMail(entry.getKey(), entry.getValue())) + .collect(Collectors.joining("\n\n")) + ); email.send(); } } catch (Exception e) { @@ -66,22 +74,27 @@ public void notifyForSubscriptions(Context context, EPerson ePerson, } } - private String generateBodyMail(Context context, List indexableObjects) { + private String generateBodyMail(String type, List subscriptionItems) { + if (subscriptionItems == null || subscriptionItems.isEmpty()) { + return EMPTY; + } try { ByteArrayOutputStream out = new ByteArrayOutputStream(); - out.write("\n".getBytes(UTF_8)); - if (indexableObjects.size() > 0) { - for (IndexableObject indexableObject : indexableObjects) { + out.write(("\nYou have " + subscriptionItems.size() + " subscription(s) active to type " + type + "\n") + .getBytes(UTF_8)); + for (SubscriptionItem item : subscriptionItems) { + out.write("\n".getBytes(UTF_8)); + out.write("List of new content for the\n".getBytes(UTF_8)); + out.write((type + " \"" + item.getName() + "\" - " + item.getUrl() + "\n") + .getBytes(UTF_8)); + + for (Entry entry : item.getItemUrlsByItemName().entrySet()) { out.write("\n".getBytes(UTF_8)); - Item item = (Item) indexableObject.getIndexedObject(); - String entityType = 
itemService.getEntityTypeLabel(item); - Optional.ofNullable(entityType2Disseminator.get(entityType)) - .orElseGet(() -> entityType2Disseminator.get("Item")) - .disseminate(context, item, out); + out.write((entry.getKey() + " - " + entry.getValue()).getBytes(UTF_8)); } - return out.toString(); - } else { - out.write("No items".getBytes(UTF_8)); + //Optional.ofNullable(entityType2Disseminator.get(type)) + // .orElseGet(() -> entityType2Disseminator.get("Item")) + // .disseminate(context, item, out); } return out.toString(); } catch (Exception e) { diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java index ae5fd931da76..43ff6b71d4b5 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java @@ -27,7 +27,6 @@ import org.dspace.core.Email; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; -import org.dspace.subscriptions.service.SubscriptionGenerator; import org.springframework.beans.factory.annotation.Autowired; @@ -38,25 +37,24 @@ * * @author Alba Aliu */ -public class StatisticsGenerator implements SubscriptionGenerator { +public class StatisticsGenerator { private static final Logger log = LogManager.getLogger(StatisticsGenerator.class); @Autowired private ConfigurationService configurationService; - @Override - public void notifyForSubscriptions(Context c, EPerson ePerson, List crisMetricsList, - List crisMetricsList1) { - // find statistics for all the subscribed objects + public void notifyForSubscriptions(Context c, EPerson ePerson, List crisMetricsList) { try { // send the notification to the user - if (Objects.nonNull(ePerson) && crisMetricsList.size() > 0) { + if (Objects.nonNull(ePerson) && !crisMetricsList.isEmpty()) { Email email = new Email(); String name = 
configurationService.getProperty("dspace.name"); File attachment = generateExcel(crisMetricsList, c); email.addAttachment(attachment, "subscriptions.xlsx"); + email.setSubject(name + ": Statistics of records which you are subscribed"); email.setContent("intro", - "This automatic email is sent by " + name + " based on the subscribed statistics updates."); + "This automatic email is sent by " + name + " based on the subscribed statistics updates.\n\n" + + "See additional details in the file attached."); email.send(); } } catch (Exception ex) { diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotification.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotification.java index b429ecbd46e7..cc5cac24eabb 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotification.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotification.java @@ -48,16 +48,23 @@ public void setup() throws ParseException { public void internalRun() throws Exception { assignCurrentUserInContext(); assignSpecialGroupsInContext(); + String typeOption = commandLine.getOptionValue("t"); String frequencyOption = commandLine.getOptionValue("f"); - if (StringUtils.isBlank(frequencyOption)) { - throw new IllegalArgumentException("Option --frequency (-f) must be set"); + if (StringUtils.isBlank(frequencyOption) || StringUtils.isBlank(typeOption)) { + throw new IllegalArgumentException("Options --frequency (-f) and --type (-t) must be set"); } if (!FrequencyType.isSupportedFrequencyType(frequencyOption)) { throw new IllegalArgumentException( "Option f must be one of following values D(Day), W(Week) or M(Month)"); } - subscriptionEmailNotificationService.perform(getContext(), handler, "content", frequencyOption); + + if (!StringUtils.equalsAny(typeOption, "content", "statistics")) { + throw new IllegalArgumentException( + "Option t (type) must be one of \"content\" or \"statistics\""); + } + + 
subscriptionEmailNotificationService.perform(getContext(), handler, typeOption, frequencyOption); } private void assignCurrentUserInContext() throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java index 52685b563d9b..d9a297e1f3d4 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java @@ -8,15 +8,11 @@ package org.dspace.subscriptions; -import java.sql.SQLException; import java.util.Objects; import org.apache.commons.cli.Options; -import org.dspace.authorize.AuthorizeServiceImpl; -import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them @@ -26,22 +22,13 @@ public class SubscriptionEmailNotificationConfiguration dspaceRunnableClass; - @Autowired - private AuthorizeServiceImpl authorizeService; - - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (Objects.isNull(options)) { Options options = new Options(); + options.addOption("t", "type", true, + "Subscription type, Valid values are \"content\" or \"statistics\""); + options.getOption("t").setRequired(true); options.addOption("f", "frequency", true, "Subscription frequency. 
Valid values include: D (Day), W (Week) and M (Month)"); options.getOption("f").setRequired(true); diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java index 95272235095a..7a7c36491278 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java @@ -7,7 +7,7 @@ */ package org.dspace.subscriptions; -import java.util.Set; +import java.util.List; import org.dspace.core.Context; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -32,6 +32,6 @@ public interface SubscriptionEmailNotificationService { /** * returns a set of supported SubscriptionTypes */ - public Set getSupportedSubscriptionTypes(); + public List getSupportedSubscriptionTypes(); } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java index 8fb01cd36e92..d11c7ab089ce 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java @@ -7,9 +7,12 @@ */ package org.dspace.subscriptions; +import static org.dspace.content.Item.ANY; import static org.dspace.core.Constants.COLLECTION; import static org.dspace.core.Constants.COMMUNITY; +import static org.dspace.core.Constants.ITEM; import static org.dspace.core.Constants.READ; +import static org.dspace.subscriptions.SubscriptionItem.fromItem; import java.sql.SQLException; import java.util.ArrayList; @@ -17,16 +20,18 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.stream.Collectors; import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.app.metrics.CrisMetrics; +import org.dspace.app.metrics.service.CrisMetricsService; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.eperson.EPerson; @@ -35,7 +40,6 @@ import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.subscriptions.service.DSpaceObjectUpdates; -import org.dspace.subscriptions.service.SubscriptionGenerator; import org.springframework.beans.factory.annotation.Autowired; /** @@ -47,79 +51,115 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma private static final Logger log = LogManager.getLogger(SubscriptionEmailNotificationServiceImpl.class); - private Map contentUpdates = new HashMap<>(); - @SuppressWarnings("rawtypes") - private Map subscriptionType2generators = new HashMap<>(); + private final Map contentUpdates; + private final ContentGenerator contentGenerator; + private final StatisticsGenerator statisticsGenerator; + private final List supportedSubscriptionTypes; @Autowired private AuthorizeService authorizeService; @Autowired private SubscribeService subscribeService; + @Autowired + private CrisMetricsService crisMetricsService; - @SuppressWarnings("rawtypes") public SubscriptionEmailNotificationServiceImpl(Map contentUpdates, - Map subscriptionType2generators) { + ContentGenerator contentGenerator, + StatisticsGenerator statisticsGenerator, + List supportedSubscriptionTypes) { this.contentUpdates = contentUpdates; - this.subscriptionType2generators = subscriptionType2generators; + this.contentGenerator = contentGenerator; + this.statisticsGenerator 
= statisticsGenerator; + this.supportedSubscriptionTypes = supportedSubscriptionTypes; } - @SuppressWarnings({ "rawtypes", "unchecked" }) public void perform(Context context, DSpaceRunnableHandler handler, String subscriptionType, String frequency) { - List communityItems = new ArrayList<>(); - List collectionsItems = new ArrayList<>(); + // Verify if subscriptionType is "content" or "subscription" + if (supportedSubscriptionTypes.get(0).equals(subscriptionType)) { + performForContent(context, handler, subscriptionType, frequency); + } else if (supportedSubscriptionTypes.get(1).equals(subscriptionType)) { + performForStatistics(context, subscriptionType, frequency); + } else { + throw new IllegalArgumentException( + "Currently this SubscriptionType:" + subscriptionType + " is not supported!"); + } + } + + @SuppressWarnings({ "rawtypes" }) + private void performForContent(Context context, DSpaceRunnableHandler handler, + String subscriptionType, String frequency) { try { List subscriptions = - findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency); - // Here is verified if SubscriptionType is "content" Or "statistics" as them are configured - if (subscriptionType2generators.keySet().contains(subscriptionType)) { - // the list of the person who has subscribed - int iterator = 0; - for (Subscription subscription : subscriptions) { - DSpaceObject dSpaceObject = subscription.getDSpaceObject(); - EPerson ePerson = subscription.getEPerson(); - - if (!authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, READ, true)) { - iterator++; - continue; - } + findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency); + List communityItems = new ArrayList<>(); + List collectionsItems = new ArrayList<>(); + Map> entityItemsByEntityType = new HashMap<>(); + int iterator = 0; - if (dSpaceObject.getType() == COMMUNITY) { - List indexableCommunityItems = contentUpdates - 
.get(Community.class.getSimpleName().toLowerCase()) - .findUpdates(context, dSpaceObject, frequency); - communityItems.addAll(getItems(context, ePerson, indexableCommunityItems)); - } else if (dSpaceObject.getType() == COLLECTION) { - List indexableCollectionItems = contentUpdates - .get(Collection.class.getSimpleName().toLowerCase()) - .findUpdates(context, dSpaceObject, frequency); - collectionsItems.addAll(getItems(context, ePerson, indexableCollectionItems)); - } else { + for (Subscription subscription : subscriptions) { + DSpaceObject dSpaceObject = subscription.getDSpaceObject(); + EPerson ePerson = subscription.getEPerson(); + + if (!authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, READ, true)) { + iterator++; + continue; + } + + switch (dSpaceObject.getType()) { + case COMMUNITY: + List indexableCommunityItems = getItems( + context, ePerson, + contentUpdates.get(Community.class.getSimpleName().toLowerCase()) + .findUpdates(context, dSpaceObject, frequency) + ); + communityItems.add(fromItem(dSpaceObject, indexableCommunityItems)); + break; + case COLLECTION: + List indexableCollectionItems = getItems( + context, ePerson, + contentUpdates.get(Collection.class.getSimpleName().toLowerCase()) + .findUpdates(context, dSpaceObject, frequency) + ); + collectionsItems.add(fromItem(dSpaceObject, indexableCollectionItems)); + break; + case ITEM: + List indexableEntityItems = getItems( + context, ePerson, contentUpdates.get(Item.class.getSimpleName().toLowerCase()) + .findUpdates(context, dSpaceObject, frequency) + ); + String dspaceType = ContentServiceFactory + .getInstance().getDSpaceObjectService(dSpaceObject) + .getMetadataFirstValue(dSpaceObject, "dspace", "entity", "type", ANY); + + entityItemsByEntityType.computeIfAbsent(dspaceType, k -> new ArrayList<>()) + .add(fromItem(dSpaceObject, indexableEntityItems)); + break; + default: log.warn("found an invalid DSpace Object type ({}) among subscriptions to send", dSpaceObject.getType()); 
continue; - } + } - if (iterator < subscriptions.size() - 1) { - // as the subscriptions are ordered by eperson id, so we send them by ePerson - if (ePerson.equals(subscriptions.get(iterator + 1).getEPerson())) { - iterator++; - continue; - } else { - subscriptionType2generators.get(subscriptionType) - .notifyForSubscriptions(context, ePerson, communityItems, collectionsItems); - communityItems.clear(); - collectionsItems.clear(); - } + if (iterator < subscriptions.size() - 1) { + // as the subscriptions are ordered by eperson id, so we send them by ePerson + if (ePerson.equals(subscriptions.get(iterator + 1).getEPerson())) { + iterator++; + continue; } else { - //in the end of the iteration - subscriptionType2generators.get(subscriptionType) - .notifyForSubscriptions(context, ePerson, communityItems, collectionsItems); + contentGenerator.notifyForSubscriptions( + ePerson, communityItems, collectionsItems, entityItemsByEntityType + ); + communityItems.clear(); + collectionsItems.clear(); + entityItemsByEntityType.clear(); } - iterator++; + } else { + //in the end of the iteration + contentGenerator.notifyForSubscriptions( + ePerson, communityItems, collectionsItems, entityItemsByEntityType + ); } - } else { - throw new IllegalArgumentException("Currently this SubscriptionType:" + subscriptionType + - " is not supported!"); + iterator++; } } catch (Exception e) { log.error(e.getMessage(), e); @@ -128,14 +168,43 @@ public void perform(Context context, DSpaceRunnableHandler handler, String subsc } } + private void performForStatistics(Context context, String subscriptionType, String frequency) { + List subscriptions = + findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency); + List crisMetricsList = new ArrayList<>(); + int iterator = 0; + + for (Subscription subscription : subscriptions) { + EPerson ePerson = subscription.getEPerson(); + DSpaceObject dSpaceObject = subscription.getDSpaceObject(); + try { + 
crisMetricsList.addAll(crisMetricsService.findAllByDSO(context, dSpaceObject)); + } catch (Exception e) { + log.error(e.getMessage()); + } + if (iterator < subscriptions.size() - 1) { + if (ePerson.equals(subscriptions.get(iterator + 1).getEPerson())) { + iterator++; + continue; + } else { + statisticsGenerator.notifyForSubscriptions(context, ePerson, crisMetricsList); + } + } else { + //in the end of the iteration + statisticsGenerator.notifyForSubscriptions(context, ePerson, crisMetricsList); + } + iterator++; + } + } + @SuppressWarnings("rawtypes") private List getItems(Context context, EPerson ePerson, List indexableItems) throws SQLException { List items = new ArrayList(); - for (IndexableObject indexableitem : indexableItems) { - Item item = (Item) indexableitem.getIndexedObject(); + for (IndexableObject indexableItem : indexableItems) { + Item item = (Item) indexableItem.getIndexedObject(); if (authorizeService.authorizeActionBoolean(context, ePerson, item, READ, true)) { - items.add(indexableitem); + items.add(indexableItem); } } return items; @@ -148,25 +217,25 @@ private List getItems(Context context, EPerson ePerson, List findAllSubscriptionsBySubscriptionTypeAndFrequency(Context context, String subscriptionType, String frequency) { try { return subscribeService.findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, - frequency) + frequency) .stream() .sorted(Comparator.comparing(s -> s.getEPerson().getID())) .collect(Collectors.toList()); } catch (SQLException e) { log.error(e.getMessage(), e); } - return new ArrayList(); + return new ArrayList<>(); } @Override - public Set getSupportedSubscriptionTypes() { - return subscriptionType2generators.keySet(); + public List getSupportedSubscriptionTypes() { + return supportedSubscriptionTypes; } } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionItem.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionItem.java new file mode 100644 index 
000000000000..3254635b015f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionItem.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +import static java.util.stream.Collectors.toMap; + +import java.util.List; +import java.util.Map; + +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.discovery.IndexableObject; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +public class SubscriptionItem { + + private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); + + private String name; + private String url; + private Map itemUrlsByItemName; + + public SubscriptionItem(String name, String url, Map itemUrlsByItemName) { + this.name = name; + this.url = url; + this.itemUrlsByItemName = itemUrlsByItemName; + } + + @SuppressWarnings({ "rawtypes" }) + static SubscriptionItem fromItem(DSpaceObject dSpaceObject, List relatedItems) { + return new SubscriptionItem( + dSpaceObject.getName(), + buildUrlForItem(dSpaceObject.getHandle()), + relatedItems.stream() + .map(obj -> (Item) obj.getIndexedObject()) + .collect(toMap(Item::getName, item -> buildUrlForItem(item.getHandle()))) + ); + } + + private static String buildUrlForItem(String handle) { + return configurationService.getProperty("dspace.ui.url") + "/handle/" + handle; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Map getItemUrlsByItemName() { + return itemUrlsByItemName; + } + + public void 
setItemUrlsByItemName(Map itemUrlsByItemName) { + this.itemUrlsByItemName = itemUrlsByItemName; + } +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/dSpaceObjectsUpdates/ItemsUpdates.java b/dspace-api/src/main/java/org/dspace/subscriptions/dSpaceObjectsUpdates/ItemsUpdates.java index bf0c1ab28e93..40fcd81dafa5 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/dSpaceObjectsUpdates/ItemsUpdates.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/dSpaceObjectsUpdates/ItemsUpdates.java @@ -40,6 +40,7 @@ import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.subscriptions.ContentGenerator; import org.dspace.subscriptions.service.DSpaceObjectUpdates; +import org.springframework.beans.factory.annotation.Autowired; /** @@ -49,10 +50,20 @@ * @author Alba Aliu */ public class ItemsUpdates implements DSpaceObjectUpdates { - private final CollectionService collectionService; - private final CommunityService communityService; - private final ItemService itemService; + + @Autowired + private CollectionService collectionService; + + @Autowired + private CommunityService communityService; + + @Autowired + private ItemService itemService; + + @Autowired private DiscoveryConfigurationService searchConfigurationService; + + @Autowired private SearchService searchService; private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ContentGenerator.class); @@ -189,12 +200,4 @@ private DiscoverQuery buildBaseQuery(DiscoveryConfiguration discoveryConfigurati return discoverQuery; } - public ItemsUpdates(CollectionService collectionService, CommunityService communityService, ItemService itemService, - DiscoveryConfigurationService searchConfigurationService, SearchService searchService) { - this.collectionService = collectionService; - this.communityService = communityService; - this.itemService = itemService; - this.searchConfigurationService = searchConfigurationService; - this.searchService = searchService; - } } 
diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java deleted file mode 100644 index 1790513b9b79..000000000000 --- a/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java +++ /dev/null @@ -1,25 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.subscriptions.service; - -import java.util.List; - -import org.dspace.core.Context; -import org.dspace.eperson.EPerson; - -/** - * Interface Class which will be used to send email notifications to ePerson - * containing information for all list of objects. - * - * @author Alba Aliu - */ -public interface SubscriptionGenerator { - - public void notifyForSubscriptions(Context c, EPerson ePerson, List comm, List coll); - -} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java index ed137e9d6d8c..32380c8cbf17 100644 --- a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java +++ b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java @@ -7,9 +7,12 @@ */ package org.dspace.usage; +import java.sql.SQLException; +import java.util.UUID; import javax.servlet.http.HttpServletRequest; import org.dspace.content.DSpaceObject; +import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.services.model.Event; @@ -19,6 +22,20 @@ */ public class UsageEvent extends Event { + public static final UsageEvent createUsageEvent( + final Context context, final HttpServletRequest req, + final DSpaceObjectService dSpaceObjectService, final UUID targetId, + final String referrer + ) { + try { + return new 
UsageEvent( + UsageEvent.Action.VIEW, req, context, dSpaceObjectService.find(context, targetId), referrer + ); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + public static enum Action { VIEW("view"), CREATE("create"), @@ -65,6 +82,8 @@ String text() { private Action action; + private String referrer; + private static String checkParams(Action action, HttpServletRequest request, Context context, DSpaceObject object) { StringBuilder eventName = new StringBuilder(); if (action == null) { @@ -187,6 +206,12 @@ public UsageEvent(Action action, String ip, String userAgent, String xforwardedf this.object = object; } + public UsageEvent(Action action, HttpServletRequest request, Context context, DSpaceObject object, + String referrer) { + this(action, request, context, object); + setReferrer(referrer); + } + public HttpServletRequest getRequest() { return request; @@ -240,4 +265,11 @@ public Action getAction() { return this.action; } + public String getReferrer() { + return referrer; + } + + public void setReferrer(String referrer) { + this.referrer = referrer; + } } diff --git a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java index 7c3e13a28e13..9ff252e8ce3f 100644 --- a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java +++ b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java @@ -26,12 +26,15 @@ import java.util.TimeZone; import java.util.regex.Pattern; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** - * This class (Apache license) is copied from Apache Solr and add some tweaks to resolve unneeded dependency: - * https://raw.githubusercontent.com/apache/lucene-solr/releases/lucene-solr/7.1.0/solr/core/src/java/org/apache/solr - * /util/DateMathParser.java + * This class (Apache license) is copied from Apache Solr, adding some tweaks to + * resolve an unneeded dependency. See + * the original. * + *

      * A Simple Utility class for parsing "math" like strings relating to Dates. * *

      @@ -78,7 +81,7 @@ * "setNow" in the interim). The default value of 'now' is * the time at the moment the DateMathParser instance is * constructed, unless overridden by the {@link CommonParams#NOW NOW} - * request param. + * request parameter. *

      * *

      @@ -88,7 +91,7 @@ * cascades to rounding of HOUR, MIN, MONTH, YEAR as well. The default * TimeZone used is UTC unless overridden by the * {@link CommonParams#TZ TZ} - * request param. + * request parameter. *

      * *

      @@ -102,6 +105,8 @@ */ public class DateMathParser { + private static final Logger LOG = LogManager.getLogger(); + public static final TimeZone UTC = TimeZone.getTimeZone("UTC"); /** @@ -119,12 +124,12 @@ public class DateMathParser { /** * A mapping from (uppercased) String labels identifying time units, - * to the corresponding {@link ChronoUnit} enum (e.g. "YEARS") used to + * to the corresponding {@link ChronoUnit} value (e.g. "YEARS") used to * set/add/roll that unit of measurement. * *

      * A single logical unit of time might be represented by multiple labels - * for convenience (ie: DATE==DAYS, + * for convenience (i.e. DATE==DAYS, * MILLI==MILLIS) *

      * @@ -220,6 +225,7 @@ private static LocalDateTime round(LocalDateTime t, String unit) { * * @param now an optional fixed date to use as "NOW" * @param val the string to parse + * @return result of applying the parsed expression to "NOW". * @throws Exception */ public static Date parseMath(Date now, String val) throws Exception { @@ -308,6 +314,7 @@ public TimeZone getTimeZone() { /** * Defines this instance's concept of "now". * + * @param n new value of "now". * @see #getNow */ public void setNow(Date n) { @@ -316,12 +323,12 @@ public void setNow(Date n) { /** * Returns a clone of this instance's concept of "now" (never null). - * * If setNow was never called (or if null was specified) then this method * first defines 'now' as the value dictated by the SolrRequestInfo if it * exists -- otherwise it uses a new Date instance at the moment getNow() * is first called. * + * @return "now". * @see #setNow * @see SolrRequestInfo#getNOW */ @@ -334,9 +341,12 @@ public Date getNow() { } /** - * Parses a string of commands relative "now" are returns the resulting Date. + * Parses a date expression relative to "now". * - * @throws ParseException positions in ParseExceptions are token positions, not character positions. + * @param math a date expression such as "+24MONTHS". + * @return the result of applying the expression to the current time. + * @throws ParseException positions in ParseExceptions are token positions, + * not character positions. 
*/ public Date parseMath(String math) throws ParseException { /* check for No-Op */ @@ -344,6 +354,8 @@ public Date parseMath(String math) throws ParseException { return getNow(); } + LOG.debug("parsing {}", math); + ZoneId zoneId = zone.toZoneId(); // localDateTime is a date and time local to the timezone specified LocalDateTime localDateTime = ZonedDateTime.ofInstant(getNow().toInstant(), zoneId).toLocalDateTime(); @@ -394,11 +406,44 @@ public Date parseMath(String math) throws ParseException { } } + LOG.debug("returning {}", localDateTime); return Date.from(ZonedDateTime.of(localDateTime, zoneId).toInstant()); } private static Pattern splitter = Pattern.compile("\\b|(?<=\\d)(?=\\D)"); + /** + * For manual testing. With one argument, test one-argument parseMath. + * With two (or more) arguments, test two-argument parseMath. + * + * @param argv date math expressions. + * @throws java.lang.Exception passed through. + */ + public static void main(String[] argv) + throws Exception { + DateMathParser parser = new DateMathParser(); + try { + Date parsed; + + if (argv.length <= 0) { + System.err.println("Date math expression(s) expected."); + } + + if (argv.length > 0) { + parsed = parser.parseMath(argv[0]); + System.out.format("Applied %s to implicit current time: %s%n", + argv[0], parsed.toString()); + } + + if (argv.length > 1) { + parsed = DateMathParser.parseMath(new Date(), argv[1]); + System.out.format("Applied %s to explicit current time: %s%n", + argv[1], parsed.toString()); + } + } catch (ParseException ex) { + System.err.format("Oops: %s%n", ex.getMessage()); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java new file mode 100644 index 000000000000..a50baf910e77 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the 
LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.lowerCase; + +import java.util.List; +import java.util.Optional; + +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.services.ConfigurationService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Service class for generation of front-end urls. + */ +@Component +public class FrontendUrlService { + + private static final Logger log = LoggerFactory.getLogger(FrontendUrlService.class); + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private SearchService searchService; + + /** + * Generates front-end url for specified item. + * + * @param context context + * @param item item + * @return front-end url + */ + public String generateUrl(Context context, Item item) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + return generateUrlWithSearchService(item, uiURL, context) + .orElseGet(() -> uiURL + "/items/" + item.getID()); + } + + /** + * Generates front-end url for specified bitstream. 
+ * + * @param bitstream bitstream + * @return front-end url + */ + public String generateUrl(Bitstream bitstream) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + return uiURL + "/bitstreams/" + bitstream.getID() + "/download"; + } + + private Optional generateUrlWithSearchService(Item item, String uiURLStem, Context context) { + DiscoverQuery entityQuery = new DiscoverQuery(); + entityQuery.setQuery("search.uniqueid:\"Item-" + item.getID() + "\" and entityType:*"); + entityQuery.addSearchField("entityType"); + + try { + DiscoverResult discoverResult = searchService.search(context, entityQuery); + if (isNotEmpty(discoverResult.getIndexableObjects())) { + List entityTypes = discoverResult.getSearchDocument(discoverResult.getIndexableObjects() + .get(0)).get(0).getSearchFieldValues("entityType"); + if (isNotEmpty(entityTypes) && isNotBlank(entityTypes.get(0))) { + return Optional.of(uiURLStem + "/entities/" + lowerCase(entityTypes.get(0)) + "/" + item.getID()); + } + } + } catch (SearchServiceException e) { + log.error("Failed getting entitytype through solr for item " + item.getID() + ": " + e.getMessage()); + } + return Optional.empty(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java b/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java index 422c2405a875..66921d041799 100644 --- a/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java @@ -8,6 +8,8 @@ package org.dspace.util; import java.util.Objects; +import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; @@ -58,4 +60,30 @@ public static T getCheckDefaultOrBuild(Predicate defaultValueChecker, T d return builder.get(); } + public static Consumer throwingConsumerWrapper( + ThrowingConsumer throwingConsumer) { + return i -> { + try { + throwingConsumer.accept(i); + } catch (Exception e) 
{ + throw new RuntimeException(e); + } + }; + } + + public static Function throwingMapperWrapper( + ThrowingMapper throwingConsumer, + R defaultValue + ) { + return i -> { + R value = defaultValue; + try { + value = throwingConsumer.accept(i); + } catch (Exception e) { + throw new RuntimeException(e); + } + return value; + }; + } + } diff --git a/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java new file mode 100644 index 000000000000..2b6f37beb2e1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.io.IOException; +import java.util.Date; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; + +/** + * This is a custom date deserializer for jackson that make use of our + * {@link MultiFormatDateParser} + * + * Dates are parsed as being in the UTC zone. 
+ * + */ +public class MultiFormatDateDeserializer extends StdDeserializer { + + public MultiFormatDateDeserializer() { + this(null); + } + + public MultiFormatDateDeserializer(Class vc) { + super(vc); + } + + @Override + public Date deserialize(JsonParser jsonparser, DeserializationContext context) + throws IOException, JsonProcessingException { + String date = jsonparser.getText(); + return MultiFormatDateParser.parse(date); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java index 0e88a0a9cdf5..cea02c76990b 100644 --- a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java +++ b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java @@ -16,6 +16,7 @@ import org.apache.commons.collections4.iterators.PermutationIterator; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; /** * Utility class that handle person names. @@ -24,6 +25,7 @@ * */ public final class PersonNameUtil { + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PersonNameUtil.class); private PersonNameUtil() { @@ -35,12 +37,14 @@ private PersonNameUtil() { * @param firstName the first name * @param lastName the last name * @param fullNames the full names + * @param uuid the uuid * @return all the variants of the given names */ - public static Set getAllNameVariants(String firstName, String lastName, List fullNames) { + public static Set getAllNameVariants(String firstName, String lastName, List fullNames, + String uuid) { Set variants = new HashSet(); variants.addAll(getNameVariants(firstName, lastName)); - variants.addAll(getNameVariants(fullNames)); + variants.addAll(getNameVariants(fullNames, uuid)); return variants; } @@ -95,24 +99,30 @@ private static List getNameVariants(String[] firstNames, String lastName return variants; } - private static List getNameVariants(List fullNames) { + private static List 
getNameVariants(List fullNames, String uuid) { return fullNames.stream() .filter(Objects::nonNull) .map(name -> removeComma(name)) .distinct() - .flatMap(name -> getAllNamePermutations(name).stream()) + .flatMap(name -> getAllNamePermutations(name, uuid).stream()) .distinct() .collect(Collectors.toList()); } - private static List getAllNamePermutations(String name) { + private static List getAllNamePermutations(String name, String uuid) { List namePermutations = new ArrayList(); - PermutationIterator permutationIterator = new PermutationIterator(List.of(name.split(" "))); + List names = List.of(name.split(" ")); + if (names.size() < 5) { + PermutationIterator permutationIterator = new PermutationIterator(names); - while (permutationIterator.hasNext()) { - namePermutations.add(String.join(" ", permutationIterator.next())); + while (permutationIterator.hasNext()) { + namePermutations.add(String.join(" ", permutationIterator.next())); + } + } else { + log.warn(String.format("Cannot retrieve variants on the Person with UUID %s because the name is too long", + uuid)); } return namePermutations; diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java index 7dcebcc09f52..9342cb8b39e8 100644 --- a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java +++ b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java @@ -447,7 +447,7 @@ private void run() throws SolrServerException, SQLException, IOException { runReport(); logTime(false); for (int processed = updateRecords(MIGQUERY); (processed != 0) - && (numProcessed < numRec); processed = updateRecords(MIGQUERY)) { + && (numProcessed <= numRec); processed = updateRecords(MIGQUERY)) { printTime(numProcessed, false); batchUpdateStats(); if (context.getCacheSize() > CACHE_LIMIT) { @@ -696,4 +696,4 @@ private UUID mapOwner(String owntype, int val) throws SQLException { return null; } -} \ No 
newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUtils.java b/dspace-api/src/main/java/org/dspace/util/SolrUtils.java index 087ea856a6e1..556e6957dc0e 100644 --- a/dspace-api/src/main/java/org/dspace/util/SolrUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/SolrUtils.java @@ -45,7 +45,9 @@ private SolrUtils() { * @return date formatter compatible with Solr. */ public static DateFormat getDateFormatter() { - return new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT); + DateFormat formatter = new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT); + formatter.setTimeZone(SOLR_TIME_ZONE); + return formatter; } /** diff --git a/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java new file mode 100644 index 000000000000..e1502e89b514 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +/** + * Things you wish {@link Throwable} or some logging package would do for you. + * + * @author mwood + */ +public class ThrowableUtils { + /** + * Utility class: do not instantiate. + */ + private ThrowableUtils() { } + + /** + * Trace a chain of {@code Throwable}s showing only causes. + * Less voluminous than a stack trace. Useful if you just want to know + * what caused third-party code to return an uninformative exception + * message. + * + * @param throwable the exception or whatever. + * @return list of messages from each {@code Throwable} in the chain, + * separated by '\n'. 
+ */ + static public String formatCauseChain(Throwable throwable) { + StringBuilder trace = new StringBuilder(); + trace.append(throwable.getMessage()); + Throwable cause = throwable.getCause(); + while (null != cause) { + trace.append("\nCaused by: ") + .append(cause.getClass().getCanonicalName()).append(' ') + .append(cause.getMessage()); + cause = cause.getCause(); + } + return trace.toString(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/ThrowingConsumer.java b/dspace-api/src/main/java/org/dspace/util/ThrowingConsumer.java new file mode 100644 index 000000000000..a04fea3ef41f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ThrowingConsumer.java @@ -0,0 +1,12 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +public interface ThrowingConsumer { + void accept(T t) throws E; +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/util/ThrowingMapper.java b/dspace-api/src/main/java/org/dspace/util/ThrowingMapper.java new file mode 100644 index 000000000000..ac4767a85706 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ThrowingMapper.java @@ -0,0 +1,12 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +public interface ThrowingMapper { + R accept(T t) throws E; +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java new file mode 100644 index 000000000000..87d354a7f6c7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java @@ -0,0 +1,42 @@ +/** + * The contents of this 
file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * Various manipulations of dates and times. + * + * @author mwood + */ +public class TimeHelpers { + private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); + + /** + * Never instantiate this class. + */ + private TimeHelpers() {} + + /** + * Set a Date's time to midnight UTC. + * + * @param from some date-time. + * @return midnight UTC of the supplied date-time. + */ + public static Date toMidnightUTC(Date from) { + GregorianCalendar calendar = new GregorianCalendar(UTC); + calendar.setTime(from); + calendar.set(GregorianCalendar.HOUR_OF_DAY, 0); + calendar.set(GregorianCalendar.MINUTE, 0); + calendar.set(GregorianCalendar.SECOND, 0); + calendar.set(GregorianCalendar.MILLISECOND, 0); + return calendar.getTime(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java b/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java index 64b6b95e8ec7..8ac9a4f7f13e 100644 --- a/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.Spliterators; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -87,6 +88,11 @@ public static List getRowValues(Row row, int size) { return values; } + public static String getEntityTypeCellValue(Row row, int index) { + Cell cell = row.getCell(index); + return getEntityTypeValue(cell); + } + public static String getCellValue(Row row, int index) { Cell cell = row.getCell(index); return getCellValue(cell); @@ -105,6 +111,15 @@ public static String getCellValue(Cell cell) { return 
formatter.formatCellValue(cell).trim(); } + public static String getEntityTypeValue(Cell cell) { + String cellValue = getCellValue(cell); + return Optional.ofNullable(cellValue) + .filter(value -> StringUtils.isNotBlank(value)) + .filter(value -> value.contains(".")) + .map(value -> value.split("\\.")[0]) + .orElse(cellValue); + } + public static Cell createCell(Row row, int column, String value) { Cell cell = row.createCell(column); cell.setCellValue(value); diff --git a/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java b/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java index 8a9a7aba10bd..4b4e237a3772 100644 --- a/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java +++ b/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java @@ -21,8 +21,8 @@ import org.dspace.app.deduplication.model.DuplicateDecisionType; import org.dspace.app.deduplication.model.DuplicateDecisionValue; -import org.dspace.app.deduplication.utils.DedupUtils; import org.dspace.app.deduplication.utils.DuplicateItemInfo; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.util.SubmissionStepConfig; import org.dspace.content.DSpaceObject; import org.dspace.content.InProgressSubmission; @@ -47,7 +47,7 @@ public class DetectPotentialDuplicateValidator implements SubmissionStepValidato private static final String ERROR_VALIDATION_DUPLICATION = "error.validation.detect-duplicate"; @Autowired - private DedupUtils dedupUtils; + private IDedupUtils dedupUtils; @Autowired private ItemService itemService; diff --git a/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java b/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java index ffc67c20311c..c13cb0e89598 100644 --- a/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java +++ b/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java @@ 
-36,7 +36,7 @@ */ public class LicenseValidator implements SubmissionStepValidator { - private static final String ERROR_VALIDATION_LICENSEREQUIRED = "error.validation.license.notgranted"; + public static final String ERROR_VALIDATION_LICENSEREQUIRED = "error.validation.license.required"; private String name; diff --git a/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java b/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java index 3d50ddf66cd8..d53b939ee44a 100644 --- a/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java +++ b/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java @@ -22,6 +22,7 @@ import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.app.util.TypeBindUtils; import org.dspace.content.Collection; import org.dspace.content.InProgressSubmission; import org.dspace.content.Item; @@ -69,10 +70,10 @@ public List validate(Context context, InProgressSubmission o List errors = new ArrayList<>(); DCInputSet inputConfig = getDCInputSet(config); - String documentTypeValue = getDocumentTypeValue(obj); + String documentType = TypeBindUtils.getTypeBindValue(obj); // Get list of all field names (including qualdrop names) allowed for this dc.type - List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentTypeValue); + List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentType); for (DCInput[] row : inputConfig.getFields()) { for (DCInput input : row) { @@ -93,7 +94,7 @@ public List validate(Context context, InProgressSubmission o // Check the lookup list. If no other inputs of the same field name allow this type, // then remove. This includes field name without qualifier. 
- if (!input.isAllowedFor(documentTypeValue) && (!allowedFieldNames.contains(fullFieldname) + if (!input.isAllowedFor(documentType) && (!allowedFieldNames.contains(fullFieldname) && !allowedFieldNames.contains(input.getFieldName()))) { removeMetadataValues(context, obj.getItem(), mdv); } else { @@ -112,24 +113,27 @@ public List validate(Context context, InProgressSubmission o } } else { - fieldsName.add(input.getFieldName()); + String fieldName = input.getFieldName(); + if (fieldName != null) { + fieldsName.add(fieldName); + } } for (String fieldName : fieldsName) { boolean valuesRemoved = false; List mdv = itemService.getMetadataByMetadataString(obj.getItem(), fieldName); - if (!input.isAllowedFor(documentTypeValue)) { + if (!input.isAllowedFor(documentType)) { // Check the lookup list. If no other inputs of the same field name allow this type, // then remove. Otherwise, do not if (!(allowedFieldNames.contains(fieldName))) { removeMetadataValues(context, obj.getItem(), mdv); valuesRemoved = true; log.debug("Stripping metadata values for " + input.getFieldName() + " on type " - + documentTypeValue + " as it is allowed by another input of the same field " + + + documentType + " as it is allowed by another input of the same field " + "name"); } else { log.debug("Not removing unallowed metadata values for " + input.getFieldName() + " on type " - + documentTypeValue + " as it is allowed by another input of the same field " + + + documentType + " as it is allowed by another input of the same field " + "name"); } } @@ -139,7 +143,7 @@ public List validate(Context context, InProgressSubmission o && !valuesRemoved) { // Is the input required for *this* type? 
In other words, are we looking at a required // input that is also allowed for this document type - if (input.isAllowedFor(documentTypeValue)) { + if (input.isAllowedFor(documentType)) { // since this field is missing add to list of error // fields addError(errors, ERROR_VALIDATION_REQUIRED, @@ -153,12 +157,6 @@ public List validate(Context context, InProgressSubmission o return errors; } - private String getDocumentTypeValue(InProgressSubmission obj) { - String documentTypeField = configurationService.getProperty("submit.type-bind.field", "dc.type"); - List documentType = itemService.getMetadataByMetadataString(obj.getItem(), documentTypeField); - return documentType.size() > 0 ? documentType.get(0).getValue() : ""; - } - private DCInputSet getDCInputSet(SubmissionStepConfig config) { try { return getInputReader().getInputsByFormName(config.getId()); diff --git a/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java b/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java index b4c9b4bc4c1a..65bd0bf19452 100644 --- a/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java @@ -55,7 +55,8 @@ private void setup() throws SubmissionConfigReaderException { @Override public List validate(Context context, InProgressSubmission obj) { - SubmissionConfig submissionConfig = submissionConfigReader.getSubmissionConfigByInProgressSubmission(obj); + SubmissionConfig submissionConfig = submissionConfigReader + .getSubmissionConfigByInProgressSubmission(obj, context); List errors = new ArrayList(); diff --git a/dspace-api/src/main/java/org/dspace/versioning/ItemCorrectionProvider.java b/dspace-api/src/main/java/org/dspace/versioning/ItemCorrectionProvider.java index 75efd7ae6965..a65ac8af97e1 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/ItemCorrectionProvider.java +++ 
b/dspace-api/src/main/java/org/dspace/versioning/ItemCorrectionProvider.java @@ -193,8 +193,7 @@ protected void updateBundlesAndBitstreams(Context c, Item itemNew, Item nativeIt List nativeBundles = nativeItem.getBundles(bundleName); List correctedBundles = itemNew.getBundles(bundleName); - if (CollectionUtils.isEmpty(nativeBundles) && CollectionUtils.isEmpty(correctedBundles) || - CollectionUtils.isEmpty(correctedBundles)) { + if (CollectionUtils.isEmpty(nativeBundles) && CollectionUtils.isEmpty(correctedBundles)) { continue; } @@ -205,7 +204,14 @@ protected void updateBundlesAndBitstreams(Context c, Item itemNew, Item nativeIt nativeBundle = nativeBundles.get(0); } - updateBundleAndBitstreams(c, nativeBundle, correctedBundles.get(0)); + Bundle correctedBundle; + if (CollectionUtils.isEmpty(correctedBundles)) { + correctedBundle = bundleService.create(c, nativeItem, bundleName); + } else { + correctedBundle = correctedBundles.get(0); + } + + updateBundleAndBitstreams(c, nativeBundle, correctedBundle); } } @@ -255,7 +261,26 @@ protected void updateBundleAndBitstreams(Context c, Bundle nativeBundle, Bundle } } + deleteBitstreams(nativeBundle, correctedBundle); bundleService.update(c, nativeBundle); + if (nativeBundle.getItems().isEmpty()) { + bundleService.delete(c, nativeBundle); + } + } + + private void deleteBitstreams(Bundle nativeBundle, Bundle correctedBundle) { + for (Bitstream bitstream : nativeBundle.getBitstreams()) { + if (contains(correctedBundle, bitstream)) { + continue; + } + nativeBundle.removeBitstream(bitstream); + } + } + + private boolean contains(Bundle bundle, Bitstream bitstream) { + return bundle.getBitstreams().stream() + .map(Bitstream::getChecksum) + .anyMatch(cs -> bitstream.getChecksum().equals(cs)); } protected Bitstream findBitstreamByChecksum(Bundle bundle, String bitstreamChecksum) { diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java 
b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java index 51a49d14cc4a..3a5da0de1016 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java @@ -31,9 +31,15 @@ import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.exception.SQLRuntimeException; import org.dspace.discovery.IndexEventConsumer; import org.dspace.event.Consumer; import org.dspace.event.Event; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; import org.dspace.utils.DSpace; import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.service.VersionHistoryService; @@ -61,6 +67,8 @@ public class VersioningConsumer implements Consumer { private RelationshipService relationshipService; private RelationshipVersioningUtils relationshipVersioningUtils; private DedupService dedupService; + private OrcidQueueService orcidQueueService; + private OrcidHistoryService orcidHistoryService; @Override public void initialize() throws Exception { @@ -72,6 +80,8 @@ public void initialize() throws Exception { relationshipVersioningUtils = VersionServiceFactory.getInstance().getRelationshipVersioningUtils(); dedupService = new DSpace().getServiceManager().getServiceByName(DedupService.class.getName(), DedupService.class); + this.orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + this.orcidHistoryService = OrcidServiceFactory.getInstance().getOrcidHistoryService(); } @Override @@ -138,6 +148,8 @@ public void consume(Context ctx, Event event) throws Exception { // unarchive previous item unarchiveItem(ctx, previousItem); + handleOrcidSynchronization(ctx, previousItem, 
latestItem); + updateDuplicateDetection(ctx, latestItem, previousItem); // update relationships @@ -155,6 +167,29 @@ protected void unarchiveItem(Context ctx, Item item) { )); } + private void handleOrcidSynchronization(Context ctx, Item previousItem, Item latestItem) { + try { + replaceOrcidHistoryEntities(ctx, previousItem, latestItem); + removeOrcidQueueEntries(ctx, previousItem); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + + private void removeOrcidQueueEntries(Context ctx, Item previousItem) throws SQLException { + List queueEntries = orcidQueueService.findByEntity(ctx, previousItem); + for (OrcidQueue queueEntry : queueEntries) { + orcidQueueService.delete(ctx, queueEntry); + } + } + + private void replaceOrcidHistoryEntities(Context ctx, Item previousItem, Item latestItem) throws SQLException { + List entries = orcidHistoryService.findByEntity(ctx, previousItem); + for (OrcidHistory entry : entries) { + entry.setEntity(latestItem); + } + } + private void updateDuplicateDetection(Context ctx, Item latestItem, Item previousItem) throws Exception { dedupService.inheritDecisions(ctx, previousItem, latestItem); dedupService.removeMatch(previousItem); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java index 56e710c51457..802c4b3c0da2 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java @@ -232,6 +232,8 @@ public XmlWorkflowItem start(Context context, WorkspaceItem wsi) //Get our next step, if none is found, archive our item firstStep = wf.getNextStep(context, wfi, firstStep, ActionResult.OUTCOME_COMPLETE); if (firstStep == null) { + // record the submitted provenance message + recordStart(context, wfi.getItem(),null); archive(context, wfi); } else { activateFirstStep(context, wf, firstStep, wfi); @@ 
-1246,25 +1248,30 @@ protected void recordStart(Context context, Item myitem, Action action) DCDate now = DCDate.getCurrent(); // Create provenance description - String provmessage = ""; + StringBuffer provmessage = new StringBuffer(); if (myitem.getSubmitter() != null) { - provmessage = "Submitted by " + myitem.getSubmitter().getFullName() - + " (" + myitem.getSubmitter().getEmail() + ") on " - + now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; + provmessage.append("Submitted by ").append(myitem.getSubmitter().getFullName()) + .append(" (").append(myitem.getSubmitter().getEmail()).append(") on ") + .append(now.toString()); } else { // else, null submitter - provmessage = "Submitted by unknown (probably automated) on" - + now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; + provmessage.append("Submitted by unknown (probably automated) on") + .append(now.toString()); + } + if (action != null) { + provmessage.append(" workflow start=").append(action.getProvenanceStartId()).append("\n"); + } else { + provmessage.append("\n"); } // add sizes and checksums of bitstreams - provmessage += installItemService.getBitstreamProvenanceMessage(context, myitem); + provmessage.append(installItemService.getBitstreamProvenanceMessage(context, myitem)); // Add message to the DC itemService .addMetadata(context, myitem, MetadataSchemaEnum.DC.getName(), - "description", "provenance", "en", provmessage); + "description", "provenance", "en", provmessage.toString()); itemService.update(context, myitem); } diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index 2550f68de19f..6d5281d2291a 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -51,6 +51,7 @@ metadata.bitstream.iiif-virtual.bytes = File size metadata.bitstream.iiif-virtual.checksum = Checksum org.dspace.app.itemexport.no-result = The 
DSpaceObject that you specified has no items. +org.dspace.app.util.SyndicationFeed.no-description = No Description org.dspace.checker.ResultsLogger.bitstream-format = Bitstream format org.dspace.checker.ResultsLogger.bitstream-found = Bitstream found org.dspace.checker.ResultsLogger.bitstream-id = Bitstream ID @@ -121,3 +122,5 @@ org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eper org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! +org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \ + the repository diff --git a/dspace-api/src/main/resources/org/dspace/layout/script/service/impl/cris-layout-configuration-template.xls b/dspace-api/src/main/resources/org/dspace/layout/script/service/impl/cris-layout-configuration-template.xls index a921219abfad..2eddc88c6777 100644 Binary files a/dspace-api/src/main/resources/org/dspace/layout/script/service/impl/cris-layout-configuration-template.xls and b/dspace-api/src/main/resources/org/dspace/layout/script/service/impl/cris-layout-configuration-template.xls differ diff --git a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl index f32942a302a2..d9f6cd361434 100644 --- a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - @@ -47,4 +47,4 @@ - \ No newline at end of file + diff --git a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl 
b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl index 84c62158fe75..d9a9745a1b10 100644 --- a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - - \ No newline at end of file + diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql deleted file mode 100644 index 7907fccc00ae..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- Copyright 2010-2017 Boxfuse GmbH --- --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
--- ------------------ --- This is the Oracle upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql --- --- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------- - -DROP INDEX "${schema}"."${table}_vr_idx"; -DROP INDEX "${schema}"."${table}_ir_idx"; -ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; -ALTER TABLE "${schema}"."${table}" DROP PRIMARY KEY DROP INDEX; -ALTER TABLE "${schema}"."${table}" MODIFY "version" NULL; -ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); -UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql index 7548fa4c6acb..edebe6e087fb 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql @@ -15,7 +15,7 @@ -- ----------------- -- This is the PostgreSQL upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/postgresql/upgradeMetaDataTable.sql -- -- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md index 8088c6ccca62..87e114ca53a5 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md @@ -4,33 +4,25 @@ in Production. Instead, DSpace uses the H2 Database to perform Unit Testing during development. -By default, the DSpace Unit Testing environment configures H2 to run in -"Oracle Mode" and initializes the H2 database using the scripts in this directory. -These database migrations are automatically called by [Flyway](http://flywaydb.org/) -when the `DatabaseManager` initializes itself (see `initializeDatabase()` method). - -The H2 migrations in this directory are *based on* the Oracle Migrations, but -with some modifications in order to be valid in H2. - -## Oracle vs H2 script differences +By default, the DSpace Unit Testing environment configures H2 to run in memory +and initializes the H2 database using the scripts in this directory. See +`[src]/dspace-api/src/test/data/dspaceFolder/config/local.cfg`. -One of the primary differences between the Oracle scripts and these H2 ones -is in the syntax of the `ALTER TABLE` command. Unfortunately, H2's syntax for -that command differs greatly from Oracle (and PostgreSQL as well). +These database migrations are automatically called by [Flyway](http://flywaydb.org/) +in `DatabaseUtils`. -Most of the remainder of the scripts contain the exact Oracle syntax (which is -usually valid in H2). But, to you can always `diff` scripts of the same name -for further syntax differences. +The H2 migrations in this directory all use H2's grammar/syntax. +For additional info see the [H2 SQL Grammar](https://www.h2database.com/html/grammar.html). -For additional info see the [H2 SQL Grammar](http://www.h2database.com/html/grammar.html). 
## More Information on Flyway The SQL scripts in this directory are H2-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql index c7cfdd84d551..01d5e709c1cb 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql @@ -6,6 +6,13 @@ -- http://www.dspace.org/license/ -- +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
+-- http://flywaydb.org/ +-- =============================================================== ------------------------------------------------------------------------------- -- Sequences for Process within Group feature ------------------------------------------------------------------------------- @@ -14,4 +21,9 @@ CREATE TABLE Process2Group ( process_id INTEGER REFERENCES Process(process_id), group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE -); \ No newline at end of file +); +----------------------------------------------------------------------------------- +-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5) +----------------------------------------------------------------------------------- + +DROP SEQUENCE history_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2023.09.22__registration_data.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2023.09.22__registration_data.sql new file mode 100644 index 000000000000..6b4994b6644e --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2023.09.22__registration_data.sql @@ -0,0 +1,46 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- ALTER table registrationdata +----------------------------------------------------------------------------------- + +EXECUTE IMMEDIATE 'ALTER TABLE registrationdata DROP CONSTRAINT ' || + QUOTE_IDENT((SELECT CONSTRAINT_NAME + FROM information_schema.key_column_usage + WHERE TABLE_SCHEMA = 'PUBLIC' AND TABLE_NAME = 'REGISTRATIONDATA' AND COLUMN_NAME = 'EMAIL')); + +ALTER TABLE registrationdata +ADD COLUMN registration_type VARCHAR2(255); + +ALTER TABLE registrationdata +ADD 
COLUMN net_id VARCHAR2(64); + +CREATE SEQUENCE IF NOT EXISTS registrationdata_metadatavalue_seq START WITH 1 INCREMENT BY 1; + +----------------------------------------------------------------------------------- +-- Creates table registrationdata_metadata +----------------------------------------------------------------------------------- + +CREATE TABLE registrationdata_metadata ( + registrationdata_metadata_id INTEGER NOT NULL, + registrationdata_id INTEGER, + metadata_field_id INTEGER, + text_value VARCHAR2(2000), + CONSTRAINT pk_registrationdata_metadata PRIMARY KEY (registrationdata_metadata_id) +); + +ALTER TABLE registrationdata_metadata +ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_METADATA_FIELD + FOREIGN KEY (metadata_field_id) + REFERENCES metadatafieldregistry (metadata_field_id) ON DELETE CASCADE; + +ALTER TABLE registrationdata_metadata +ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_REGISTRATIONDATA + FOREIGN KEY (registrationdata_id) + REFERENCES registrationdata (registrationdata_id) ON DELETE CASCADE; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql similarity index 77% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql index 95d07be477d5..e4544e1de729 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql @@ -7,7 +7,7 @@ -- ----------------------------------------------------------------------------------- --- Create columns copy_left and copy_right for RelationshipType +-- Drop the 
'history_seq' sequence (related table deleted at Dspace-1.5) ----------------------------------------------------------------------------------- -ALTER TABLE relationship_type ADD tilted INTEGER; +DROP SEQUENCE IF EXISTS history_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql similarity index 60% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql index 0db294c1c13a..8aec44a7f6f2 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -7,8 +7,11 @@ -- ----------------------------------------------------------------------------------- --- Create columns copy_left and copy_right for RelationshipType +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 ----------------------------------------------------------------------------------- -ALTER TABLE relationship_type ADD copy_to_left NUMBER(1) DEFAULT 0 NOT NULL; -ALTER TABLE relationship_type ADD copy_to_right NUMBER(1) DEFAULT 0 NOT NULL; +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..7641eb9fc2c0 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description SET DATA TYPE CLOB; +ALTER TABLE orcid_queue ALTER COLUMN description SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..1028ba370c47 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.23__add_custom_filter.sql similarity index 72% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql rename to 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.23__add_custom_filter.sql index c86cfe31223e..369bd14f7064 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.23__add_custom_filter.sql @@ -14,7 +14,4 @@ -- http://flywaydb.org/ -- =============================================================== ------------------------------------------------------- --- DS-1945 RequestItem Helpdesk, store request message ------------------------------------------------------- -ALTER TABLE requestitem ADD request_message VARCHAR2(2000); +ALTER TABLE cris_layout_tab ADD IF NOT EXISTS custom_filter varchar(255); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql new file mode 100644 index 000000000000..6ea435bfeed2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- -- +-- Remove unique constraint on entity_id and shortname of table cris_layout_tab. 
+-- Now the entity_id and shortname aren't unique because entity_type can have custom_filter in it +-- -- +ALTER TABLE cris_layout_tab DROP CONSTRAINT cris_layout_tab_entity_shortname_unique; + +-- -- +-- +-- -- +ALTER TABLE cris_layout_tab ADD CONSTRAINT cris_layout_tab_entity_shortname_custom_filter_unique UNIQUE(entity_id, shortname, custom_filter); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql new file mode 100644 index 000000000000..6ae50fb29bf6 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_tab2securitygroup ADD alternative_tab_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_tab2securitygroup ADD COLUMN alternative_tab_id INTEGER; +ALTER TABLE cris_layout_tab2securitygroup ADD CONSTRAINT cris_layout_tab2securitygroup_tab_id2 FOREIGN KEY (alternative_tab_id) REFERENCES cris_layout_tab (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql 
new file mode 100644 index 000000000000..38360bb13cd8 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_box2securitygroup ADD alternative_box_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_box2securitygroup ADD COLUMN alternative_box_id INTEGER; +ALTER TABLE cris_layout_box2securitygroup ADD CONSTRAINT cris_layout_box2securitygroup_box_id2 FOREIGN KEY (alternative_box_id) REFERENCES cris_layout_box (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql deleted file mode 100644 index fff1fe154f57..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql +++ /dev/null @@ -1,90 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. 
IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create COMMUNITY handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - -------------------------------------------------------------- --- This will create COLLECTION handle metadata -------------------------------------------------------------- - -insert into metadatavalue 
(metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md deleted file mode 100644 index 6cef123859ca..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# Oracle Flyway Database Migrations (i.e. Upgrades) - ---- -WARNING: Oracle Support is deprecated. 
-See https://github.com/DSpace/DSpace/issues/8214 ---- - -The SQL scripts in this directory are Oracle-specific database migrations. They are -used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). -As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using -and then executes the appropriate upgrade script(s) to bring it up to the latest -version. - -If any failures occur, Flyway will "rollback" the upgrade script which resulted -in an error and log the issue in the DSpace log file at `[dspace]/log/dspace.log.[date]` - -**WARNING:** IT IS NOT RECOMMENDED TO RUN THESE SCRIPTS MANUALLY. If you do so, -Flyway will may throw failures the next time you startup DSpace, as Flyway will -not realize you manually ran one or more scripts. - -Please see the Flyway Documentation for more information: http://flywaydb.org/ - -## Oracle Porting Notes for the Curious - -Oracle is missing quite a number of cool features found in Postgres, so -workarounds had to be found, most of which are hidden behind tests in -DatabaseManager. If Oracle is your DBMS, the workarounds are activated: - -Oracle doesn't like ';' characters in JDBC SQL - they have all been removed -from the DSpace source, including code in the .sql file reader to strip ;'s. - -browse code - LIMIT and OFFSET is used to limit browse results, and an -Oracle-hack is used to limit the result set to a given size - -Oracle has no boolean data type, so a new schema file was created that -uses NUMBER(1) (AKA 'integers') and code is inserted everywhere to use 0 for -false and 1 for true if DSpace is using Oracle. - -Oracle doesn't have a TEXT data type either, so TEXT columns are defined -as VARCHAR2 in the Oracle-specific schema. 
- -Oracle doesn't allow dynamic naming for objects, so our cute trick to -derive the name of the sequence by appending _seq to the table name -in a function doesn't work in Oracle - workaround is to insert Oracle -code to generate the name of the sequence and then place that into -our SQL calls to generate a new ID. - -Oracle doesn't let you directly set the value of sequences, so -update-sequences.sql is forced to use a special script sequpdate.sql -to update the sequences. - -Bitstream had a column 'size' which is a reserved word in Oracle, -so this had to be changed to 'size_bytes' with corresponding code changes. - -VARCHAR2 has a limit of 4000 characters, so DSpace text data is limited to 4k. -Going to the CLOB data type can get around that, but seemed like too much effort -for now. Note that with UTF-8 encoding that 4k could translate to 1300 -characters worst-case (every character taking up 3 bytes is the worst case -scenario.) - -### UPDATE 5 April 2007 - -CLOBs are now used as follows: -MetadataValue:text_value -Community:introductory_text -Community:copyright_text -Collection:introductory_text -Collection:license -Collection:copyright_text - -DatabaseManager had to have some of the type checking changed, because Oracle's -JDBC driver is reporting INTEGERS as type DECIMAL. - -Oracle doesn't like it when you reference table names in lower case when -getting JDBC metadata for the tables, so they are converted in TableRow -to upper case. - -### UPDATE 27 November 2012 - -Oracle complains with ORA-01408 if you attempt to create an index on a column which -has already had the UNIQUE contraint added (such an index is implicit in maintaining the uniqueness -of the column). See [DS-1370](https://jira.duraspace.org/browse/DS-1370) for details. 
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql deleted file mode 100644 index 157274e05d66..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql +++ /dev/null @@ -1,550 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE bitstreamformatregistry_seq; -CREATE SEQUENCE fileextension_seq; -CREATE SEQUENCE bitstream_seq; -CREATE SEQUENCE eperson_seq; --- start group sequence at 0, since Anonymous group = 0 -CREATE SEQUENCE epersongroup_seq MINVALUE 0 START WITH 0; -CREATE SEQUENCE item_seq; -CREATE SEQUENCE bundle_seq; -CREATE SEQUENCE item2bundle_seq; -CREATE SEQUENCE bundle2bitstream_seq; -CREATE SEQUENCE dctyperegistry_seq; -CREATE SEQUENCE dcvalue_seq; -CREATE SEQUENCE community_seq; -CREATE SEQUENCE collection_seq; -CREATE SEQUENCE community2community_seq; -CREATE SEQUENCE community2collection_seq; -CREATE SEQUENCE collection2item_seq; -CREATE SEQUENCE resourcepolicy_seq; -CREATE SEQUENCE epersongroup2eperson_seq; -CREATE SEQUENCE handle_seq; -CREATE SEQUENCE workspaceitem_seq; -CREATE SEQUENCE workflowitem_seq; -CREATE SEQUENCE tasklistitem_seq; -CREATE SEQUENCE registrationdata_seq; -CREATE 
SEQUENCE subscription_seq; -CREATE SEQUENCE history_seq; -CREATE SEQUENCE historystate_seq; -CREATE SEQUENCE communities2item_seq; -CREATE SEQUENCE itemsbyauthor_seq; -CREATE SEQUENCE itemsbytitle_seq; -CREATE SEQUENCE itemsbydate_seq; -CREATE SEQUENCE itemsbydateaccessioned_seq; - - -------------------------------------------------------- --- BitstreamFormatRegistry table -------------------------------------------------------- -CREATE TABLE BitstreamFormatRegistry -( - bitstream_format_id INTEGER PRIMARY KEY, - mimetype VARCHAR2(48), - short_description VARCHAR2(128) UNIQUE, - description VARCHAR2(2000), - support_level INTEGER, - -- Identifies internal types - internal NUMBER(1) -); - -------------------------------------------------------- --- FileExtension table -------------------------------------------------------- -CREATE TABLE FileExtension -( - file_extension_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - extension VARCHAR2(16) -); - -------------------------------------------------------- --- Bitstream table -------------------------------------------------------- -CREATE TABLE Bitstream -( - bitstream_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - name VARCHAR2(256), - size_bytes INTEGER, - checksum VARCHAR2(64), - checksum_algorithm VARCHAR2(32), - description VARCHAR2(2000), - user_format_description VARCHAR2(2000), - source VARCHAR2(256), - internal_id VARCHAR2(256), - deleted NUMBER(1), - store_number INTEGER, - sequence_id INTEGER -); - -------------------------------------------------------- --- EPerson table -------------------------------------------------------- -CREATE TABLE EPerson -( - eperson_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - password VARCHAR2(64), - firstname VARCHAR2(64), - lastname VARCHAR2(64), - can_log_in NUMBER(1), - require_certificate NUMBER(1), - self_registered NUMBER(1), - 
last_active TIMESTAMP, - sub_frequency INTEGER, - phone VARCHAR2(32) -); - -------------------------------------------------------- --- EPersonGroup table -------------------------------------------------------- -CREATE TABLE EPersonGroup -( - eperson_group_id INTEGER PRIMARY KEY, - name VARCHAR2(256) UNIQUE -); - -------------------------------------------------------- --- Item table -------------------------------------------------------- -CREATE TABLE Item -( - item_id INTEGER PRIMARY KEY, - submitter_id INTEGER REFERENCES EPerson(eperson_id), - in_archive NUMBER(1), - withdrawn NUMBER(1), - last_modified TIMESTAMP, - owning_collection INTEGER -); - -------------------------------------------------------- --- Bundle table -------------------------------------------------------- -CREATE TABLE Bundle -( - bundle_id INTEGER PRIMARY KEY, - mets_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - name VARCHAR2(16), -- ORIGINAL | THUMBNAIL | TEXT - primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - -------------------------------------------------------- --- Item2Bundle table -------------------------------------------------------- -CREATE TABLE Item2Bundle -( - id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - bundle_id INTEGER REFERENCES Bundle(bundle_id) -); - --- index by item_id -CREATE INDEX item2bundle_item_idx on Item2Bundle(item_id); - -------------------------------------------------------- --- Bundle2Bitstream table -------------------------------------------------------- -CREATE TABLE Bundle2Bitstream -( - id INTEGER PRIMARY KEY, - bundle_id INTEGER REFERENCES Bundle(bundle_id), - bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - --- index by bundle_id -CREATE INDEX bundle2bitstream_bundle_idx ON Bundle2Bitstream(bundle_id); - -------------------------------------------------------- --- DCTypeRegistry table -------------------------------------------------------- -CREATE TABLE DCTypeRegistry -( - 
dc_type_id INTEGER PRIMARY KEY, - element VARCHAR2(64), - qualifier VARCHAR2(64), - scope_note VARCHAR2(2000), - UNIQUE(element, qualifier) -); - -------------------------------------------------------- --- DCValue table -------------------------------------------------------- -CREATE TABLE DCValue -( - dc_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - dc_type_id INTEGER REFERENCES DCTypeRegistry(dc_type_id), - text_value VARCHAR2(2000), - text_lang VARCHAR2(24), - place INTEGER, - source_id INTEGER -); - --- An index for item_id - almost all access is based on --- instantiating the item object, which grabs all dcvalues --- related to that item -CREATE INDEX dcvalue_item_idx on DCValue(item_id); - -------------------------------------------------------- --- Community table -------------------------------------------------------- -CREATE TABLE Community -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128) UNIQUE, - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000) -); - -------------------------------------------------------- --- Collection table -------------------------------------------------------- -CREATE TABLE Collection -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license VARCHAR2(2000), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -------------------------------------------------------- --- 
Community2Community table -------------------------------------------------------- -CREATE TABLE Community2Community -( - id INTEGER PRIMARY KEY, - parent_comm_id INTEGER REFERENCES Community(community_id), - child_comm_id INTEGER REFERENCES Community(community_id) -); - -------------------------------------------------------- --- Community2Collection table -------------------------------------------------------- -CREATE TABLE Community2Collection -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - -------------------------------------------------------- --- Collection2Item table -------------------------------------------------------- -CREATE TABLE Collection2Item -( - id INTEGER PRIMARY KEY, - collection_id INTEGER REFERENCES Collection(collection_id), - item_id INTEGER REFERENCES Item(item_id) -); - --- index by collection_id -CREATE INDEX collection2item_collection_idx ON Collection2Item(collection_id); - -------------------------------------------------------- --- ResourcePolicy table -------------------------------------------------------- -CREATE TABLE ResourcePolicy -( - policy_id INTEGER PRIMARY KEY, - resource_type_id INTEGER, - resource_id INTEGER, - action_id INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - epersongroup_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - start_date DATE, - end_date DATE -); - --- index by resource_type,resource_id - all queries by --- authorization manager are select type=x, id=y, action=z -CREATE INDEX resourcepolicy_type_id_idx ON ResourcePolicy(resource_type_id,resource_id); - -------------------------------------------------------- --- EPersonGroup2EPerson table -------------------------------------------------------- -CREATE TABLE EPersonGroup2EPerson -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - eperson_id INTEGER REFERENCES 
EPerson(eperson_id) -); - --- Index by group ID (used heavily by AuthorizeManager) -CREATE INDEX epersongroup2eperson_group_idx on EPersonGroup2EPerson(eperson_group_id); - - -------------------------------------------------------- --- Handle table -------------------------------------------------------- -CREATE TABLE Handle -( - handle_id INTEGER PRIMARY KEY, - handle VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER -); - -------------------------------------------------------- --- WorkspaceItem table -------------------------------------------------------- -CREATE TABLE WorkspaceItem -( - workspace_item_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), -- boolean - published_before NUMBER(1), - multiple_files NUMBER(1), - -- How for the user has got in the submit process - stage_reached INTEGER -); - -------------------------------------------------------- --- WorkflowItem table -------------------------------------------------------- -CREATE TABLE WorkflowItem -( - workflow_id INTEGER PRIMARY KEY, - item_id INTEGER UNIQUE REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - state INTEGER, - owner INTEGER REFERENCES EPerson(eperson_id), - - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI - -); - -------------------------------------------------------- --- TasklistItem table -------------------------------------------------------- -CREATE TABLE TasklistItem -( - tasklist_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - workflow_id INTEGER REFERENCES WorkflowItem(workflow_id) -); - - 
-------------------------------------------------------- --- RegistrationData table -------------------------------------------------------- -CREATE TABLE RegistrationData -( - registrationdata_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - token VARCHAR2(48), - expires TIMESTAMP -); - - -------------------------------------------------------- --- Subscription table -------------------------------------------------------- -CREATE TABLE Subscription -( - subscription_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - - -------------------------------------------------------- --- History table -------------------------------------------------------- -CREATE TABLE History -( - history_id INTEGER PRIMARY KEY, - -- When it was stored - creation_date TIMESTAMP, - -- A checksum to keep INTEGERizations from being stored more than once - checksum VARCHAR2(32) UNIQUE -); - -------------------------------------------------------- --- HistoryState table -------------------------------------------------------- -CREATE TABLE HistoryState -( - history_state_id INTEGER PRIMARY KEY, - object_id VARCHAR2(64) -); - ------------------------------------------------------------- --- Browse subsystem tables and views ------------------------------------------------------------- - -------------------------------------------------------- --- Communities2Item table -------------------------------------------------------- -CREATE TABLE Communities2Item -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - item_id INTEGER REFERENCES Item(item_id) -); - -------------------------------------------------------- --- Community2Item view ------------------------------------------------------- -CREATE VIEW Community2Item as -SELECT Community2Collection.community_id, Collection2Item.item_id -FROM Community2Collection, Collection2Item -WHERE 
Collection2Item.collection_id = Community2Collection.collection_id -; - -------------------------------------------------------- --- ItemsByAuthor table -------------------------------------------------------- -CREATE TABLE ItemsByAuthor -( - items_by_author_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - author VARCHAR2(2000), - sort_author VARCHAR2(2000) -); - --- index by sort_author, of course! -CREATE INDEX sort_author_idx on ItemsByAuthor(sort_author); - -------------------------------------------------------- --- CollectionItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CollectionItemsByAuthor as -SELECT Collection2Item.collection_id, ItemsByAuthor.* -FROM ItemsByAuthor, Collection2Item -WHERE ItemsByAuthor.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CommunityItemsByAuthor as -SELECT Communities2Item.community_id, ItemsByAuthor.* -FROM ItemsByAuthor, Communities2Item -WHERE ItemsByAuthor.item_id = Communities2Item.item_id -; - ----------------------------------------- --- ItemsByTitle table ----------------------------------------- -CREATE TABLE ItemsByTitle -( - items_by_title_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - title VARCHAR2(2000), - sort_title VARCHAR2(2000) -); - --- index by the sort_title -CREATE INDEX sort_title_idx on ItemsByTitle(sort_title); - - -------------------------------------------------------- --- CollectionItemsByTitle view -------------------------------------------------------- -CREATE VIEW CollectionItemsByTitle as -SELECT Collection2Item.collection_id, ItemsByTitle.* -FROM ItemsByTitle, Collection2Item -WHERE ItemsByTitle.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByTitle view 
-------------------------------------------------------- -CREATE VIEW CommunityItemsByTitle as -SELECT Communities2Item.community_id, ItemsByTitle.* -FROM ItemsByTitle, Communities2Item -WHERE ItemsByTitle.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDate table -------------------------------------------------------- -CREATE TABLE ItemsByDate -( - items_by_date_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_issued VARCHAR2(2000) -); - --- sort by date -CREATE INDEX date_issued_idx on ItemsByDate(date_issued); - -------------------------------------------------------- --- CollectionItemsByDate view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDate as -SELECT Collection2Item.collection_id, ItemsByDate.* -FROM ItemsByDate, Collection2Item -WHERE ItemsByDate.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDate view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDate as -SELECT Communities2Item.community_id, ItemsByDate.* -FROM ItemsByDate, Communities2Item -WHERE ItemsByDate.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDateAccessioned table -------------------------------------------------------- -CREATE TABLE ItemsByDateAccessioned -( - items_by_date_accessioned_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_accessioned VARCHAR2(2000) -); - -------------------------------------------------------- --- CollectionItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDateAccession as -SELECT Collection2Item.collection_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Collection2Item -WHERE ItemsByDateAccessioned.item_id = Collection2Item.item_id -; - 
-------------------------------------------------------- --- CommunityItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDateAccession as -SELECT Communities2Item.community_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Communities2Item -WHERE ItemsByDateAccessioned.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql deleted file mode 100644 index 37d7e115eb53..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql +++ /dev/null @@ -1,57 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE epersongroup2workspaceitem_seq; - -------------------------------------------------------------------------------- --- create the new EPersonGroup2WorkspaceItem table -------------------------------------------------------------------------------- - -CREATE TABLE EPersonGroup2WorkspaceItem -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - workspace_item_id INTEGER REFERENCES WorkspaceItem(workspace_item_id) -); - -------------------------------------------------------------------------------- --- modification to collection table to support being able to change the --- submitter and collection admin group names -------------------------------------------------------------------------------- -ALTER TABLE collection ADD submitter INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE collection ADD admin INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE eperson ADD netid VARCHAR2(64) UNIQUE; - -------------------------------------------------------------------------------- --- Additional indices for performance -------------------------------------------------------------------------------- - --- index by resource id and resource type id -CREATE INDEX handle_resource_id_type_idx ON handle(resource_id, resource_type_id); - --- Indexing browse tables update/re-index performance -CREATE INDEX Communities2Item_item_id_idx ON Communities2Item( item_id ); -CREATE INDEX ItemsByAuthor_item_id_idx ON ItemsByAuthor(item_id); -CREATE INDEX ItemsByTitle_item_id_idx ON ItemsByTitle(item_id); -CREATE INDEX ItemsByDate_item_id_idx ON ItemsByDate(item_id); -CREATE INDEX ItemsByDateAcc_item_id_idx ON ItemsByDateAccessioned(item_id); - --- Improve mapping tables -CREATE INDEX Com2Coll_community_id_idx ON Community2Collection(community_id); -CREATE INDEX Com2Coll_collection_id_idx ON 
Community2Collection(collection_id); -CREATE INDEX Coll2Item_item_id_idx ON Collection2Item( item_id ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql deleted file mode 100644 index a713ced8bbb2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql +++ /dev/null @@ -1,133 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ---------------------------------------- --- Update MetadataValue to include CLOB ---------------------------------------- - -CREATE TABLE MetadataValueTemp -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value CLOB, - text_lang VARCHAR(64), - place INTEGER -); - -INSERT INTO MetadataValueTemp -SELECT * FROM MetadataValue; - -DROP VIEW dcvalue; -DROP TABLE MetadataValue; -ALTER TABLE MetadataValueTemp RENAME TO MetadataValue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); - ------------------------------------- --- Update Community to include CLOBs ------------------------------------- - -CREATE TABLE CommunityTemp -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text CLOB, - side_bar_text VARCHAR2(2000) -); - -INSERT INTO CommunityTemp -SELECT * FROM Community; - -DROP TABLE Community CASCADE CONSTRAINTS; -ALTER TABLE CommunityTemp RENAME TO Community; - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_parent -FOREIGN KEY (parent_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_child -FOREIGN KEY (child_comm_id) -REFERENCES Community 
(community_id); - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -ALTER TABLE Communities2Item ADD CONSTRAINT fk_c2i_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -------------------------------------- --- Update Collection to include CLOBs -------------------------------------- - -CREATE TABLE CollectionTemp -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license CLOB, - copyright_text CLOB, - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - submitter INTEGER REFERENCES EPersonGroup( eperson_group_id ), - admin INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -INSERT INTO CollectionTemp -SELECT * FROM Collection; - -DROP TABLE Collection CASCADE CONSTRAINTS; -ALTER TABLE CollectionTemp RENAME TO Collection; - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Collection2Item ADD CONSTRAINT fk_c2i_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkspaceItem ADD CONSTRAINT fk_wsi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkflowItem ADD CONSTRAINT fk_wfi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Subscription ADD CONSTRAINT fk_subs_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql deleted file mode 100644 index 54cf10067b91..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ /dev/null @@ -1,371 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------- --- Sequences for Group within Group feature -------------------------------------------------------------------------------- -CREATE SEQUENCE group2group_seq; -CREATE SEQUENCE group2groupcache_seq; - ------------------------------------------------------- --- Group2Group table, records group membership in other groups ------------------------------------------------------- -CREATE TABLE Group2Group -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - ------------------------------------------------------- --- Group2GroupCache table, is the 'unwound' hierarchy in --- Group2Group. It explicitly names every parent child --- relationship, even with nested groups. 
For example, --- If Group2Group lists B is a child of A and C is a child of B, --- this table will have entries for parent(A,B), and parent(B,C) --- AND parent(A,C) so that all of the child groups of A can be --- looked up in a single simple query ------------------------------------------------------- -CREATE TABLE Group2GroupCache -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - - -------------------------------------------------------- --- New Metadata Tables and Sequences -------------------------------------------------------- -CREATE SEQUENCE metadataschemaregistry_seq; -CREATE SEQUENCE metadatafieldregistry_seq; -CREATE SEQUENCE metadatavalue_seq; - --- MetadataSchemaRegistry table -CREATE TABLE MetadataSchemaRegistry -( - metadata_schema_id INTEGER PRIMARY KEY, - namespace VARCHAR(256) UNIQUE, - short_id VARCHAR(32) -); - --- MetadataFieldRegistry table -CREATE TABLE MetadataFieldRegistry -( - metadata_field_id INTEGER PRIMARY KEY, - metadata_schema_id INTEGER NOT NULL REFERENCES MetadataSchemaRegistry(metadata_schema_id), - element VARCHAR(64), - qualifier VARCHAR(64), - scope_note VARCHAR2(2000) -); - --- MetadataValue table -CREATE TABLE MetadataValue -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value VARCHAR2(2000), - text_lang VARCHAR(24), - place INTEGER -); - --- Create the DC schema -INSERT INTO MetadataSchemaRegistry VALUES (1,'http://dublincore.org/documents/dcmi-terms/','dc'); - --- Migrate the existing DCTypes into the new metadata field registry -INSERT INTO MetadataFieldRegistry - (metadata_schema_id, metadata_field_id, element, qualifier, scope_note) - SELECT '1' AS metadata_schema_id, dc_type_id, element, - qualifier, scope_note FROM dctyperegistry; - --- Copy the DCValues into the new MetadataValue table 
-INSERT INTO MetadataValue (item_id, metadata_field_id, text_value, text_lang, place) - SELECT item_id, dc_type_id, text_value, text_lang, place FROM dcvalue; - -DROP TABLE dcvalue; -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - - --- After copying data from dctypregistry to metadataschemaregistry, we need to reset our sequences --- Update metadatafieldregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_field_id) INTO curr FROM metadatafieldregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatafieldregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatafieldregistry_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadatavalue_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_value_id) INTO curr FROM metadatavalue; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatavalue_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatavalue_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadataschemaregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_schema_id) INTO curr FROM metadataschemaregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadataschemaregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadataschemaregistry_seq START WITH ' || NVL(curr,1); -END; -/ - --- Drop the old dctyperegistry -DROP TABLE dctyperegistry; - --- create indexes for the metadata tables -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); -CREATE INDEX 
metadatafield_schema_idx ON MetadataFieldRegistry(metadata_schema_id); - - -------------------------------------------------------- --- Create the checksum checker tables -------------------------------------------------------- --- list of the possible results as determined --- by the system or an administrator - -CREATE TABLE checksum_results -( - result_code VARCHAR(64) PRIMARY KEY, - result_description VARCHAR2(2000) -); - - --- This table has a one-to-one relationship --- with the bitstream table. A row will be inserted --- every time a row is inserted into the bitstream table, and --- that row will be updated every time the checksum is --- re-calculated. - -CREATE TABLE most_recent_checksum -( - bitstream_id INTEGER PRIMARY KEY, - to_be_processed NUMBER(1) NOT NULL, - expected_checksum VARCHAR(64) NOT NULL, - current_checksum VARCHAR(64) NOT NULL, - last_process_start_date TIMESTAMP NOT NULL, - last_process_end_date TIMESTAMP NOT NULL, - checksum_algorithm VARCHAR(64) NOT NULL, - matched_prev_checksum NUMBER(1) NOT NULL, - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - - --- A row will be inserted into this table every --- time a checksum is re-calculated. 
- -CREATE SEQUENCE checksum_history_seq; - -CREATE TABLE checksum_history -( - check_id INTEGER PRIMARY KEY, - bitstream_id INTEGER, - process_start_date TIMESTAMP, - process_end_date TIMESTAMP, - checksum_expected VARCHAR(64), - checksum_calculated VARCHAR(64), - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - --- this will insert into the result code --- the initial results - -insert into checksum_results -values -( - 'INVALID_HISTORY', - 'Install of the cheksum checking code do not consider this history as valid' -); - -insert into checksum_results -values -( - 'BITSTREAM_NOT_FOUND', - 'The bitstream could not be found' -); - -insert into checksum_results -values -( - 'CHECKSUM_MATCH', - 'Current checksum matched previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_NO_MATCH', - 'Current checksum does not match previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_PREV_NOT_FOUND', - 'Previous checksum was not found: no comparison possible' -); - -insert into checksum_results -values -( - 'BITSTREAM_INFO_NOT_FOUND', - 'Bitstream info not found' -); - -insert into checksum_results -values -( - 'CHECKSUM_ALGORITHM_INVALID', - 'Invalid checksum algorithm' -); -insert into checksum_results -values -( - 'BITSTREAM_NOT_PROCESSED', - 'Bitstream marked to_be_processed=false' -); -insert into checksum_results -values -( - 'BITSTREAM_MARKED_DELETED', - 'Bitstream marked deleted in bitstream table' -); - --- this will insert into the most recent checksum --- on install all existing bitstreams --- setting all bitstreams already set as --- deleted to not be processed - -insert into most_recent_checksum -( - bitstream_id, - to_be_processed, - expected_checksum, - current_checksum, - last_process_start_date, - last_process_end_date, - checksum_algorithm, - matched_prev_checksum -) -select - bitstream.bitstream_id, - '1', - CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - CASE WHEN 
bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' -from bitstream; - --- Update all the deleted checksums --- to not be checked --- because they have since been --- deleted from the system - -update most_recent_checksum -set to_be_processed = 0 -where most_recent_checksum.bitstream_id in ( -select bitstream_id -from bitstream where deleted = '1' ); - --- this will insert into history table --- for the initial start --- we want to tell the users to disregard the initial --- inserts into the checksum history table - -insert into checksum_history -( - bitstream_id, - process_start_date, - process_end_date, - checksum_expected, - checksum_calculated -) -select most_recent_checksum.bitstream_id, - most_recent_checksum.last_process_end_date, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - most_recent_checksum.expected_checksum, - most_recent_checksum.expected_checksum -FROM most_recent_checksum; - --- update the history to indicate that this was --- the first time the software was installed -update checksum_history -set result = 'INVALID_HISTORY'; - - -------------------------------------------------------- --- Table and views for 'browse by subject' functionality -------------------------------------------------------- -CREATE SEQUENCE itemsbysubject_seq; - -------------------------------------------------------- --- ItemsBySubject table -------------------------------------------------------- -CREATE TABLE ItemsBySubject -( - items_by_subject_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - subject VARCHAR2(2000), - sort_subject VARCHAR2(2000) -); - --- index by sort_subject -CREATE INDEX 
sort_subject_idx on ItemsBySubject(sort_subject); - -------------------------------------------------------- --- CollectionItemsBySubject view -------------------------------------------------------- -CREATE VIEW CollectionItemsBySubject as -SELECT Collection2Item.collection_id, ItemsBySubject.* -FROM ItemsBySubject, Collection2Item -WHERE ItemsBySubject.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsBySubject view -------------------------------------------------------- -CREATE VIEW CommunityItemsBySubject as -SELECT Communities2Item.community_id, ItemsBySubject.* -FROM ItemsBySubject, Communities2Item -WHERE ItemsBySubject.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql deleted file mode 100644 index bb217bd0d18d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql +++ /dev/null @@ -1,142 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - --- Remove NOT NULL restrictions from the checksum columns of most_recent_checksum -ALTER TABLE most_recent_checksum MODIFY expected_checksum null; -ALTER TABLE most_recent_checksum MODIFY current_checksum null; - ------------------------------------------------------- --- New Column language language in EPerson ------------------------------------------------------- - -alter table eperson ADD language VARCHAR2(64); -update eperson set language = 'en'; - --- totally unused column -alter table bundle drop column mets_bitstream_id; - -------------------------------------------------------------------------------- --- Necessary for Configurable Submission functionality: --- Modification to workspaceitem table to support keeping track --- of the last page reached within a step in the Configurable Submission Process -------------------------------------------------------------------------------- -ALTER TABLE workspaceitem ADD page_reached INTEGER; - - -------------------------------------------------------------------------- --- Increase the mimetype field size to support larger types, such as the --- new Word 2007 mimetypes. 
-------------------------------------------------------------------------- -ALTER TABLE BitstreamFormatRegistry MODIFY (mimetype VARCHAR(256)); - - -------------------------------------------------------------------------- --- Tables to manage cache of item counts for communities and collections -------------------------------------------------------------------------- - -CREATE TABLE collection_item_count ( - collection_id INTEGER PRIMARY KEY REFERENCES collection(collection_id), - count INTEGER -); - -CREATE TABLE community_item_count ( - community_id INTEGER PRIMARY KEY REFERENCES community(community_id), - count INTEGER -); - ------------------------------------------------------------------- --- Remove sequences and tables of the old browse system ------------------------------------------------------------------- - -DROP SEQUENCE itemsbyauthor_seq; -DROP SEQUENCE itemsbytitle_seq; -DROP SEQUENCE itemsbydate_seq; -DROP SEQUENCE itemsbydateaccessioned_seq; -DROP SEQUENCE itemsbysubject_seq; - -DROP TABLE ItemsByAuthor CASCADE CONSTRAINTS; -DROP TABLE ItemsByTitle CASCADE CONSTRAINTS; -DROP TABLE ItemsByDate CASCADE CONSTRAINTS; -DROP TABLE ItemsByDateAccessioned CASCADE CONSTRAINTS; -DROP TABLE ItemsBySubject CASCADE CONSTRAINTS; - -DROP TABLE History CASCADE CONSTRAINTS; -DROP TABLE HistoryState CASCADE CONSTRAINTS; - ----------------------------------------------------------------- --- Add indexes for foreign key columns ----------------------------------------------------------------- - -CREATE INDEX fe_bitstream_fk_idx ON FileExtension(bitstream_format_id); - -CREATE INDEX bit_bitstream_fk_idx ON Bitstream(bitstream_format_id); - -CREATE INDEX g2g_parent_fk_idx ON Group2Group(parent_id); -CREATE INDEX g2g_child_fk_idx ON Group2Group(child_id); - --- CREATE INDEX g2gc_parent_fk_idx ON Group2Group(parent_id); --- CREATE INDEX g2gc_child_fk_idx ON Group2Group(child_id); - -CREATE INDEX item_submitter_fk_idx ON Item(submitter_id); - -CREATE INDEX 
bundle_primary_fk_idx ON Bundle(primary_bitstream_id); - -CREATE INDEX item2bundle_bundle_fk_idx ON Item2Bundle(bundle_id); - -CREATE INDEX bundle2bits_bitstream_fk_idx ON Bundle2Bitstream(bitstream_id); - -CREATE INDEX metadatavalue_field_fk_idx ON MetadataValue(metadata_field_id); - -CREATE INDEX community_logo_fk_idx ON Community(logo_bitstream_id); - -CREATE INDEX collection_logo_fk_idx ON Collection(logo_bitstream_id); -CREATE INDEX collection_template_fk_idx ON Collection(template_item_id); -CREATE INDEX collection_workflow1_fk_idx ON Collection(workflow_step_1); -CREATE INDEX collection_workflow2_fk_idx ON Collection(workflow_step_2); -CREATE INDEX collection_workflow3_fk_idx ON Collection(workflow_step_3); -CREATE INDEX collection_submitter_fk_idx ON Collection(submitter); -CREATE INDEX collection_admin_fk_idx ON Collection(admin); - -CREATE INDEX com2com_parent_fk_idx ON Community2Community(parent_comm_id); -CREATE INDEX com2com_child_fk_idx ON Community2Community(child_comm_id); - -CREATE INDEX rp_eperson_fk_idx ON ResourcePolicy(eperson_id); -CREATE INDEX rp_epersongroup_fk_idx ON ResourcePolicy(epersongroup_id); - -CREATE INDEX epg2ep_eperson_fk_idx ON EPersonGroup2EPerson(eperson_id); - -CREATE INDEX workspace_item_fk_idx ON WorkspaceItem(item_id); -CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id); - --- CREATE INDEX workflow_item_fk_idx ON WorkflowItem(item_id); -CREATE INDEX workflow_coll_fk_idx ON WorkflowItem(collection_id); -CREATE INDEX workflow_owner_fk_idx ON WorkflowItem(owner); - -CREATE INDEX tasklist_eperson_fk_idx ON TasklistItem(eperson_id); -CREATE INDEX tasklist_workflow_fk_idx ON TasklistItem(workflow_id); - -CREATE INDEX subs_eperson_fk_idx ON Subscription(eperson_id); -CREATE INDEX subs_collection_fk_idx ON Subscription(collection_id); - -CREATE INDEX epg2wi_group_fk_idx ON epersongroup2workspaceitem(eperson_group_id); -CREATE INDEX epg2wi_workspace_fk_idx ON epersongroup2workspaceitem(workspace_item_id); - -CREATE 
INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id ); - -CREATE INDEX mrc_result_fk_idx ON most_recent_checksum( result ); - -CREATE INDEX ch_result_fk_idx ON checksum_history( result ); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql deleted file mode 100644 index 659ca32983cc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql +++ /dev/null @@ -1,93 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- New Column for Community Admin - Delegated Admin patch (DS-228) ------------------------------------------------------------------- -ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id ); -CREATE INDEX community_admin_fk_idx ON Community(admin); - -------------------------------------------------------------------------- --- DS-236 schema changes for Authority Control of Metadata Values -------------------------------------------------------------------------- -ALTER TABLE MetadataValue - ADD ( authority VARCHAR(100), - confidence INTEGER DEFAULT -1); - --------------------------------------------------------------------------- --- DS-295 CC License being assigned incorrect Mime Type during submission. --------------------------------------------------------------------------- -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License') - WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons'; - -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML') - WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons'; - -------------------------------------------------------------------------- --- DS-260 Cleanup of Owning collection column for template item created --- with the JSPUI after the collection creation -------------------------------------------------------------------------- -UPDATE item SET owning_collection = null WHERE item_id IN - (SELECT template_item_id FROM collection WHERE template_item_id IS NOT null); - --- Recreate restraints with a know name and deferrable option! 
--- (The previous version of these constraints is dropped by org.dspace.storage.rdbms.migration.V1_5_9__Drop_constraint_for_DSpace_1_6_schema) -ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE; -ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE; -ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE; - - ------------------------------------------------------------------- --- New tables /sequences for the harvester functionality (DS-289) ------------------------------------------------------------------- -CREATE SEQUENCE harvested_collection_seq; -CREATE SEQUENCE harvested_item_seq; - -------------------------------------------------------- --- Create the harvest settings table -------------------------------------------------------- --- Values used by the OAIHarvester to harvest a collection --- HarvestInstance is the DAO class for this table - -CREATE TABLE harvested_collection -( - collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE, - harvest_type INTEGER, - oai_source VARCHAR(256), - oai_set_id VARCHAR(256), - harvest_message VARCHAR2(512), - metadata_config_id VARCHAR(256), - harvest_status INTEGER, - harvest_start_time TIMESTAMP, - last_harvested TIMESTAMP, - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id); - - -CREATE TABLE harvested_item -( - item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE, - last_harvested TIMESTAMP, - oai_id VARCHAR(64), - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql deleted file mode 100644 index 472dc7dc5279..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql +++ /dev/null @@ -1,52 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -ALTER TABLE resourcepolicy - ADD ( - rpname VARCHAR2(30), - rptype VARCHAR2(30), - rpdescription VARCHAR2(100) - ); - - -ALTER TABLE item ADD discoverable NUMBER(1); - -CREATE TABLE versionhistory -( - versionhistory_id INTEGER NOT NULL PRIMARY KEY -); - -CREATE TABLE versionitem -( - versionitem_id INTEGER NOT NULL PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - version_number INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - version_date TIMESTAMP, - version_summary VARCHAR2(255), - versionhistory_id INTEGER REFERENCES VersionHistory(versionhistory_id) -); - -CREATE SEQUENCE versionitem_seq; -CREATE SEQUENCE versionhistory_seq; - - -------------------------------------------- --- New columns and longer hash for salted password hashing DS-861 -- -------------------------------------------- -ALTER TABLE EPerson modify( password VARCHAR(128)); -ALTER TABLE EPerson ADD salt VARCHAR(32); -ALTER TABLE EPerson ADD digest_algorithm VARCHAR(16); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql deleted file mode 100644 index 8102376906a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql +++ /dev/null @@ -1,88 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- Ensure that discoverable has a sensible default -------------------------------------------- -update item set discoverable=1 WHERE discoverable IS NULL; - -------------------------------------------- --- Add support for DOIs (table and seq.) 
-- -------------------------------------------- - -CREATE TABLE Doi -( - doi_id INTEGER PRIMARY KEY, - doi VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER, - status INTEGER -); - -CREATE SEQUENCE doi_seq; - --- index by resource id and resource type id -CREATE INDEX doi_resource_id_type_idx ON doi(resource_id, resource_type_id); - -------------------------------------------- --- Table of running web applications for 'dspace version' -- -------------------------------------------- - -CREATE TABLE Webapp -( - webapp_id INTEGER NOT NULL PRIMARY KEY, - AppName VARCHAR2(32), - URL VARCHAR2(1000), - Started TIMESTAMP, - isUI NUMBER(1) -); - -CREATE SEQUENCE webapp_seq; - -------------------------------------------------------- --- DS-824 RequestItem table -------------------------------------------------------- - -CREATE TABLE requestitem -( - requestitem_id INTEGER NOT NULL, - token varchar(48), - item_id INTEGER, - bitstream_id INTEGER, - allfiles NUMBER(1), - request_email VARCHAR2(64), - request_name VARCHAR2(64), - request_date TIMESTAMP, - accept_request NUMBER(1), - decision_date TIMESTAMP, - expires TIMESTAMP, - CONSTRAINT requestitem_pkey PRIMARY KEY (requestitem_id), - CONSTRAINT requestitem_token_key UNIQUE (token) -); - -CREATE SEQUENCE requestitem_seq; - -------------------------------------------------------- --- DS-1655 Disable "Initial Questions" page in Submission UI by default -------------------------------------------------------- -update workspaceitem set multiple_titles=1, published_before=1, multiple_files=1; -update workflowitem set multiple_titles=1, published_before=1, multiple_files=1; - -------------------------------------------------------- --- DS-1811 Removing a collection fails if non-Solr DAO has been used before for item count -------------------------------------------------------- -delete from collection_item_count; -delete from community_item_count; diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql deleted file mode 100644 index 6d75905ec980..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql +++ /dev/null @@ -1,64 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - --- Special case of migration, we need to the EPerson schema in order to get our metadata for all queries to work --- but we cannot a DB connection until our database is up to date, so we need to create our registries manually in sql - -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/eperson' as namespace, 'eperson' as short_id FROM dual - WHERE NOT EXISTS (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry WHERE namespace = 'http://dspace.org/eperson' AND short_id = 'eperson'); - - --- Insert eperson.firstname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'firstname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry 
WHERE element = 'firstname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.lastname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'lastname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'lastname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.phone -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'phone' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'phone' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.language -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'language' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'language' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert into dc.provenance -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'provenance' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry 
WHERE element = 'provenance' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); - --- Insert into dc.rights.license -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'rights', 'license' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry WHERE element = 'rights' AND qualifier='license' AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql deleted file mode 100644 index 8f0cd0d5e1d7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ /dev/null @@ -1,333 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1582 Metadata on all DSpace Objects --- NOTE: This script also has a complimentary Flyway Java Migration --- which drops the "item_id" constraint on metadatavalue --- org.dspace.storage.rdbms.migration.V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint ------------------------------------------------------- -alter table metadatavalue rename column item_id to resource_id; - -alter table metadatavalue MODIFY(resource_id not null); -alter table metadatavalue add resource_type_id integer; -UPDATE metadatavalue SET resource_type_id = 2; -alter table metadatavalue MODIFY(resource_type_id not null); - - - --- --------- --- community --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM community where not short_description is null; 
- -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM community where not name is null; - -alter table community drop (introductory_text, short_description, side_bar_text, copyright_text, name); - - --- ---------- --- collection --- ---------- - - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, 
text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select 
metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM collection where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, -provenance_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not provenance_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id, -license AS text_value, -null AS text_lang, -0 AS place -FROM collection where not license is null; - -alter table collection 
drop (introductory_text, short_description, copyright_text, side_bar_text, name, license, provenance_description); - - --- --------- --- bundle --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bundle_id AS resource_id, -1 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bundle where not name is null; - -alter table bundle drop column name; - - - --- --------- --- bitstream --- --------- - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, 
resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, -user_format_description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not user_format_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, -source AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not source is null; - -alter table bitstream drop (name, description, user_format_description, source); - - --- --------- --- epersongroup --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_group_id AS resource_id, -6 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM epersongroup where not name is null; - -alter table epersongroup drop column name; - - - --- --------- --- eperson --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, 
text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, -firstname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not firstname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, -lastname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not lastname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, -phone AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not phone is null; - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 
'language' and qualifier is null) AS metadata_field_id, -language AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not language is null; - -alter table eperson drop (firstname, lastname, phone, language); - --- --------- --- dcvalue view --- --------- - -drop view dcvalue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.resource_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1 AND MetadataValue.resource_type_id = 2; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql deleted file mode 100644 index 2e09b807de3b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id 
in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql deleted file mode 100644 index 9f9836faf471..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3563 Missing database index on metadatavalue.resource_type_id ------------------------------------------------------- --- Create an index on the metadata value resource_type_id column so that it can be searched efficiently. 
-declare - index_not_exists EXCEPTION; - PRAGMA EXCEPTION_INIT(index_not_exists, -1418); -begin - - execute immediate 'DROP INDEX metadatavalue_type_id_idx'; - exception - when index_not_exists then null; -end; -/ -CREATE INDEX metadatavalue_type_id_idx ON metadatavalue (resource_type_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql deleted file mode 100644 index dd857e763df0..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql +++ /dev/null @@ -1,469 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -DROP VIEW community2item; - -CREATE TABLE dspaceobject -( - uuid RAW(16) NOT NULL PRIMARY KEY -); - -CREATE TABLE site -( - uuid RAW(16) NOT NULL PRIMARY KEY REFERENCES dspaceobject(uuid) -); - -ALTER TABLE eperson ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM eperson; -ALTER TABLE eperson ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE eperson MODIFY uuid NOT NULL; -ALTER TABLE eperson ADD CONSTRAINT eperson_id_unique PRIMARY KEY (uuid); -UPDATE eperson SET require_certificate = '0' WHERE require_certificate IS NULL; -UPDATE eperson SET self_registered = '0' WHERE self_registered IS NULL; - - - -UPDATE metadatavalue SET text_value='Administrator' - WHERE resource_type_id=6 AND resource_id=1; -UPDATE metadatavalue SET text_value='Anonymous' - 
WHERE resource_type_id=6 AND resource_id=0; - -ALTER TABLE epersongroup ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM epersongroup; -ALTER TABLE epersongroup ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE epersongroup MODIFY uuid NOT NULL; -ALTER TABLE epersongroup ADD CONSTRAINT epersongroup_id_unique PRIMARY KEY (uuid); - -ALTER TABLE item ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM item; -ALTER TABLE item ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE item MODIFY uuid NOT NULL; -ALTER TABLE item ADD CONSTRAINT item_id_unique PRIMARY KEY (uuid); - -ALTER TABLE community ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM community; -ALTER TABLE community ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE community MODIFY uuid NOT NULL; -ALTER TABLE community ADD CONSTRAINT community_id_unique PRIMARY KEY (uuid); - - -ALTER TABLE collection ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM collection; -ALTER TABLE collection ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE collection MODIFY uuid NOT NULL; -ALTER TABLE collection ADD CONSTRAINT collection_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bundle ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bundle; -ALTER TABLE bundle ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bundle MODIFY uuid NOT NULL; -ALTER TABLE bundle ADD CONSTRAINT bundle_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bitstream ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bitstream; -ALTER TABLE bitstream ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bitstream MODIFY uuid NOT NULL; -ALTER TABLE bitstream ADD CONSTRAINT bitstream_id_unique PRIMARY KEY (uuid); -UPDATE bitstream SET sequence_id = -1 WHERE sequence_id IS NULL; -UPDATE 
bitstream SET size_bytes = -1 WHERE size_bytes IS NULL; -UPDATE bitstream SET deleted = '0' WHERE deleted IS NULL; -UPDATE bitstream SET store_number = -1 WHERE store_number IS NULL; - --- Migrate EPersonGroup2EPerson table -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE EPersonGroup2EPerson ADD eperson_group_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE EPersonGroup2EPerson ADD eperson_id RAW(16) REFERENCES Eperson(uuid); -CREATE INDEX EpersonGroup2Eperson_group on EpersonGroup2Eperson(eperson_group_id); -CREATE INDEX EpersonGroup2Eperson_person on EpersonGroup2Eperson(eperson_id); -UPDATE EPersonGroup2EPerson SET eperson_group_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE EPersonGroup2EPerson.eperson_group_legacy_id = EPersonGroup.eperson_group_id); -UPDATE EPersonGroup2EPerson SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE EPersonGroup2EPerson.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_group_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_legacy_id; -ALTER TABLE epersongroup2eperson DROP COLUMN id; -ALTER TABLE EPersonGroup2EPerson add CONSTRAINT EPersonGroup2EPerson_unique primary key (eperson_group_id,eperson_id); - --- Migrate GROUP2GROUP table -ALTER TABLE Group2Group RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2Group RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2Group ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2Group ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2Group_parent on Group2Group(parent_id); -CREATE INDEX Group2Group_child on Group2Group(child_id); -UPDATE Group2Group SET parent_id = (SELECT 
EPersonGroup.uuid FROM EpersonGroup WHERE Group2Group.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2Group SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2Group.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2Group MODIFY parent_id NOT NULL; -ALTER TABLE Group2Group MODIFY child_id NOT NULL; -ALTER TABLE Group2Group DROP COLUMN parent_legacy_id; -ALTER TABLE Group2Group DROP COLUMN child_legacy_id; -ALTER TABLE Group2Group DROP COLUMN id; -ALTER TABLE Group2Group add CONSTRAINT Group2Group_unique primary key (parent_id,child_id); - --- Migrate collection2item -ALTER TABLE Collection2Item RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Collection2Item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE Collection2Item ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE Collection2Item ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX Collecion2Item_collection on Collection2Item(collection_id); -CREATE INDEX Collecion2Item_item on Collection2Item(item_id); -UPDATE Collection2Item SET collection_id = (SELECT Collection.uuid FROM Collection WHERE Collection2Item.collection_legacy_id = Collection.collection_id); -UPDATE Collection2Item SET item_id = (SELECT Item.uuid FROM Item WHERE Collection2Item.item_legacy_id = Item.item_id); -ALTER TABLE Collection2Item MODIFY collection_id NOT NULL; -ALTER TABLE Collection2Item MODIFY item_id NOT NULL; -ALTER TABLE Collection2Item DROP COLUMN collection_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN item_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN id; --- Magic query that will delete all duplicate collection item_id references from the database (if we don't do this the primary key creation will fail) -DELETE FROM collection2item WHERE rowid NOT IN (SELECT MIN(rowid) FROM collection2item GROUP BY collection_id,item_id); -ALTER TABLE Collection2Item add CONSTRAINT collection2item_unique primary key 
(collection_id,item_id); - --- Migrate Community2Community -ALTER TABLE Community2Community RENAME COLUMN parent_comm_id to parent_legacy_id; -ALTER TABLE Community2Community RENAME COLUMN child_comm_id to child_legacy_id; -ALTER TABLE Community2Community ADD parent_comm_id RAW(16) REFERENCES Community(uuid); -ALTER TABLE Community2Community ADD child_comm_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX Community2Community_parent on Community2Community(parent_comm_id); -CREATE INDEX Community2Community_child on Community2Community(child_comm_id); -UPDATE Community2Community SET parent_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.parent_legacy_id = Community.community_id); -UPDATE Community2Community SET child_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.child_legacy_id = Community.community_id); -ALTER TABLE Community2Community MODIFY parent_comm_id NOT NULL; -ALTER TABLE Community2Community MODIFY child_comm_id NOT NULL; -ALTER TABLE Community2Community DROP COLUMN parent_legacy_id; -ALTER TABLE Community2Community DROP COLUMN child_legacy_id; -ALTER TABLE Community2Community DROP COLUMN id; -ALTER TABLE Community2Community add CONSTRAINT Community2Community_unique primary key (parent_comm_id,child_comm_id); - --- Migrate community2collection -ALTER TABLE community2collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE community2collection RENAME COLUMN community_id to community_legacy_id; -ALTER TABLE community2collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE community2collection ADD community_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX community2collection_collectio on community2collection(collection_id); -CREATE INDEX community2collection_community on community2collection(community_id); -UPDATE community2collection SET collection_id = (SELECT Collection.uuid FROM Collection WHERE community2collection.collection_legacy_id = 
Collection.collection_id); -UPDATE community2collection SET community_id = (SELECT Community.uuid FROM Community WHERE community2collection.community_legacy_id = Community.community_id); -ALTER TABLE community2collection MODIFY collection_id NOT NULL; -ALTER TABLE community2collection MODIFY community_id NOT NULL; -ALTER TABLE community2collection DROP COLUMN collection_legacy_id; -ALTER TABLE community2collection DROP COLUMN community_legacy_id; -ALTER TABLE community2collection DROP COLUMN id; -ALTER TABLE community2collection add CONSTRAINT community2collection_unique primary key (collection_id,community_id); - - --- Migrate Group2GroupCache table -ALTER TABLE Group2GroupCache RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2GroupCache RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2GroupCache ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2GroupCache ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2GroupCache_parent on Group2GroupCache(parent_id); -CREATE INDEX Group2GroupCache_child on Group2GroupCache(child_id); -UPDATE Group2GroupCache SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2GroupCache SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2GroupCache MODIFY parent_id NOT NULL; -ALTER TABLE Group2GroupCache MODIFY child_id NOT NULL; -ALTER TABLE Group2GroupCache DROP COLUMN parent_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN child_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN id; -ALTER TABLE Group2GroupCache add CONSTRAINT Group2GroupCache_unique primary key (parent_id,child_id); - --- Migrate Item2Bundle -ALTER TABLE item2bundle RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE item2bundle RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE item2bundle ADD 
bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE item2bundle ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX item2bundle_bundle on item2bundle(bundle_id); -CREATE INDEX item2bundle_item on item2bundle(item_id); -UPDATE item2bundle SET bundle_id = (SELECT Bundle.uuid FROM Bundle WHERE item2bundle.bundle_legacy_id = Bundle.bundle_id); -UPDATE item2bundle SET item_id = (SELECT Item.uuid FROM Item WHERE item2bundle.item_legacy_id = Item.item_id); -ALTER TABLE item2bundle MODIFY bundle_id NOT NULL; -ALTER TABLE item2bundle MODIFY item_id NOT NULL; -ALTER TABLE item2bundle DROP COLUMN bundle_legacy_id; -ALTER TABLE item2bundle DROP COLUMN item_legacy_id; -ALTER TABLE item2bundle DROP COLUMN id; -ALTER TABLE item2bundle add CONSTRAINT item2bundle_unique primary key (bundle_id,item_id); - ---Migrate Bundle2Bitsteam -ALTER TABLE bundle2bitstream RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE bundle2bitstream ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE bundle2bitstream ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle2bitstream_bundle on bundle2bitstream(bundle_id); -CREATE INDEX bundle2bitstream_bitstream on bundle2bitstream(bitstream_id); -UPDATE bundle2bitstream SET bundle_id = (SELECT bundle.uuid FROM bundle WHERE bundle2bitstream.bundle_legacy_id = bundle.bundle_id); -UPDATE bundle2bitstream SET bitstream_id = (SELECT bitstream.uuid FROM bitstream WHERE bundle2bitstream.bitstream_legacy_id = bitstream.bitstream_id); -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_order to bitstream_order_legacy; -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; -MERGE INTO bundle2bitstream dst -USING ( SELECT ROWID AS r_id - , ROW_NUMBER () OVER ( PARTITION BY bundle_id - ORDER BY bitstream_order_legacy, bitstream_id - ) AS new_order - FROM bundle2bitstream - ) src -ON (dst.ROWID = src.r_id) -WHEN MATCHED THEN UPDATE -SET 
dst.bitstream_order = (src.new_order-1) -; -ALTER TABLE bundle2bitstream MODIFY bundle_id NOT NULL; -ALTER TABLE bundle2bitstream MODIFY bitstream_id NOT NULL; -ALTER TABLE bundle2bitstream DROP COLUMN bundle_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN bitstream_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN id; -ALTER TABLE bundle2bitstream add CONSTRAINT bundle2bitstream_unique primary key (bitstream_id,bundle_id,bitstream_order); - - --- Migrate item -ALTER TABLE item RENAME COLUMN submitter_id to submitter_id_legacy_id; -ALTER TABLE item ADD submitter_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX item_submitter on item(submitter_id); -UPDATE item SET submitter_id = (SELECT eperson.uuid FROM eperson WHERE item.submitter_id_legacy_id = eperson.eperson_id); -ALTER TABLE item DROP COLUMN submitter_id_legacy_id; - -ALTER TABLE item RENAME COLUMN owning_collection to owning_collection_legacy; -ALTER TABLE item ADD owning_collection RAW(16) REFERENCES Collection(uuid); -CREATE INDEX item_collection on item(owning_collection); -UPDATE item SET owning_collection = (SELECT Collection.uuid FROM Collection WHERE item.owning_collection_legacy = collection.collection_id); -ALTER TABLE item DROP COLUMN owning_collection_legacy; - -UPDATE item SET in_archive = '0' WHERE in_archive IS NULL; -UPDATE item SET discoverable = '0' WHERE discoverable IS NULL; -UPDATE item SET withdrawn = '0' WHERE withdrawn IS NULL; - --- Migrate bundle -ALTER TABLE bundle RENAME COLUMN primary_bitstream_id to primary_bitstream_legacy_id; -ALTER TABLE bundle ADD primary_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle_primary on bundle(primary_bitstream_id); -UPDATE bundle SET primary_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE bundle.primary_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE bundle DROP COLUMN primary_bitstream_legacy_id; - - --- Migrate community references -ALTER TABLE Community RENAME COLUMN admin to admin_legacy; 
-ALTER TABLE Community ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Community_admin on Community(admin); -UPDATE Community SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Community.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Community DROP COLUMN admin_legacy; - -ALTER TABLE Community RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Community ADD logo_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX Community_bitstream on Community(logo_bitstream_id); -UPDATE Community SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Community.logo_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE Community DROP COLUMN logo_bitstream_legacy_id; - - ---Migrate Collection references -ALTER TABLE Collection RENAME COLUMN workflow_step_1 to workflow_step_1_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_2 to workflow_step_2_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_3 to workflow_step_3_legacy; -ALTER TABLE Collection RENAME COLUMN submitter to submitter_legacy; -ALTER TABLE Collection RENAME COLUMN template_item_id to template_item_legacy_id; -ALTER TABLE Collection RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Collection RENAME COLUMN admin to admin_legacy; -ALTER TABLE Collection ADD workflow_step_1 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_2 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_3 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD submitter RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD template_item_id RAW(16); -ALTER TABLE Collection ADD logo_bitstream_id RAW(16); -ALTER TABLE Collection ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Collection_workflow1 on Collection(workflow_step_1); -CREATE INDEX Collection_workflow2 on Collection(workflow_step_2); -CREATE INDEX 
Collection_workflow3 on Collection(workflow_step_3); -CREATE INDEX Collection_submitter on Collection(submitter); -CREATE INDEX Collection_template on Collection(template_item_id); -CREATE INDEX Collection_bitstream on Collection(logo_bitstream_id); -UPDATE Collection SET workflow_step_1 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_1_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_2 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_2_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_3 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_3_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET submitter = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.submitter_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET template_item_id = (SELECT Item.uuid FROM Item WHERE Collection.template_item_legacy_id = Item.item_id); -UPDATE Collection SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Collection.logo_bitstream_legacy_id = Bitstream.bitstream_id); -UPDATE Collection SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Collection DROP COLUMN workflow_step_1_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_2_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_3_legacy; -ALTER TABLE Collection DROP COLUMN submitter_legacy; -ALTER TABLE Collection DROP COLUMN template_item_legacy_id; -ALTER TABLE Collection DROP COLUMN logo_bitstream_legacy_id; -ALTER TABLE Collection DROP COLUMN admin_legacy; - - --- Migrate resource policy references -ALTER TABLE ResourcePolicy RENAME COLUMN eperson_id to eperson_id_legacy_id; -ALTER TABLE ResourcePolicy ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX resourcepolicy_person on resourcepolicy(eperson_id); -UPDATE ResourcePolicy SET 
eperson_id = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.eperson_id_legacy_id = eperson.eperson_id); -ALTER TABLE ResourcePolicy DROP COLUMN eperson_id_legacy_id; - -ALTER TABLE ResourcePolicy RENAME COLUMN epersongroup_id to epersongroup_id_legacy_id; -ALTER TABLE ResourcePolicy ADD epersongroup_id RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX resourcepolicy_group on resourcepolicy(epersongroup_id); -UPDATE ResourcePolicy SET epersongroup_id = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.epersongroup_id_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE ResourcePolicy DROP COLUMN epersongroup_id_legacy_id; - -ALTER TABLE ResourcePolicy ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -UPDATE ResourcePolicy SET dspace_object = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.resource_id = eperson.eperson_id AND ResourcePolicy.resource_type_id = 7) WHERE ResourcePolicy.resource_type_id = 7; -UPDATE ResourcePolicy SET dspace_object = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.resource_id = epersongroup.eperson_group_id AND ResourcePolicy.resource_type_id = 6) WHERE ResourcePolicy.resource_type_id = 6; -UPDATE ResourcePolicy SET dspace_object = (SELECT community.uuid FROM community WHERE ResourcePolicy.resource_id = community.community_id AND ResourcePolicy.resource_type_id = 4) WHERE ResourcePolicy.resource_type_id = 4; -UPDATE ResourcePolicy SET dspace_object = (SELECT collection.uuid FROM collection WHERE ResourcePolicy.resource_id = collection.collection_id AND ResourcePolicy.resource_type_id = 3) WHERE ResourcePolicy.resource_type_id = 3; -UPDATE ResourcePolicy SET dspace_object = (SELECT item.uuid FROM item WHERE ResourcePolicy.resource_id = item.item_id AND ResourcePolicy.resource_type_id = 2) WHERE ResourcePolicy.resource_type_id = 2; -UPDATE ResourcePolicy SET dspace_object = (SELECT bundle.uuid FROM bundle 
WHERE ResourcePolicy.resource_id = bundle.bundle_id AND ResourcePolicy.resource_type_id = 1) WHERE ResourcePolicy.resource_type_id = 1; -UPDATE ResourcePolicy SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE ResourcePolicy.resource_id = bitstream.bitstream_id AND ResourcePolicy.resource_type_id = 0) WHERE ResourcePolicy.resource_type_id = 0; -UPDATE resourcepolicy SET resource_type_id = -1 WHERE resource_type_id IS NULL; -UPDATE resourcepolicy SET action_id = -1 WHERE action_id IS NULL; - - --- Migrate Subscription -ALTER TABLE Subscription RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE Subscription ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX Subscription_person on Subscription(eperson_id); -UPDATE Subscription SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE Subscription.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE Subscription DROP COLUMN eperson_legacy_id; - -ALTER TABLE Subscription RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Subscription ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX Subscription_collection on Subscription(collection_id); -UPDATE Subscription SET collection_id = (SELECT collection.uuid FROM collection WHERE Subscription.collection_legacy_id = collection.collection_id); -ALTER TABLE Subscription DROP COLUMN collection_legacy_id; - - --- Migrate versionitem -ALTER TABLE versionitem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE versionitem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX versionitem_person on versionitem(eperson_id); -UPDATE versionitem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE versionitem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE versionitem DROP COLUMN eperson_legacy_id; - -ALTER TABLE versionitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE versionitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX versionitem_item on versionitem(item_id); -UPDATE 
versionitem SET item_id = (SELECT item.uuid FROM item WHERE versionitem.item_legacy_id = item.item_id); -ALTER TABLE versionitem DROP COLUMN item_legacy_id; -UPDATE versionitem SET version_number = -1 WHERE version_number IS NULL; - --- Migrate handle table -ALTER TABLE handle RENAME COLUMN resource_id to resource_legacy_id; -ALTER TABLE handle ADD resource_id RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX handle_object on handle(resource_id); -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4); -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3); -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2); - --- Migrate metadata value table -DROP VIEW dcvalue; - -ALTER TABLE metadatavalue ADD dspace_object_id RAW(16) REFERENCES dspaceobject(uuid); --- CREATE INDEX metadatavalue_field on metadatavalue(metadata_field_id); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); -UPDATE metadatavalue SET dspace_object_id = (SELECT eperson.uuid FROM eperson WHERE metadatavalue.resource_id = eperson.eperson_id AND metadatavalue.resource_type_id = 7) WHERE metadatavalue.resource_type_id= 7; -UPDATE metadatavalue SET dspace_object_id = (SELECT epersongroup.uuid FROM epersongroup WHERE metadatavalue.resource_id = epersongroup.eperson_group_id AND metadatavalue.resource_type_id = 6) WHERE metadatavalue.resource_type_id= 6; -UPDATE metadatavalue SET dspace_object_id = (SELECT community.uuid FROM community WHERE metadatavalue.resource_id = community.community_id AND metadatavalue.resource_type_id = 4) WHERE metadatavalue.resource_type_id= 4; -UPDATE metadatavalue 
SET dspace_object_id = (SELECT collection.uuid FROM collection WHERE metadatavalue.resource_id = collection.collection_id AND metadatavalue.resource_type_id = 3) WHERE metadatavalue.resource_type_id= 3; -UPDATE metadatavalue SET dspace_object_id = (SELECT item.uuid FROM item WHERE metadatavalue.resource_id = item.item_id AND metadatavalue.resource_type_id = 2) WHERE metadatavalue.resource_type_id= 2; -UPDATE metadatavalue SET dspace_object_id = (SELECT bundle.uuid FROM bundle WHERE metadatavalue.resource_id = bundle.bundle_id AND metadatavalue.resource_type_id = 1) WHERE metadatavalue.resource_type_id= 1; -UPDATE metadatavalue SET dspace_object_id = (SELECT bitstream.uuid FROM bitstream WHERE metadatavalue.resource_id = bitstream.bitstream_id AND metadatavalue.resource_type_id = 0) WHERE metadatavalue.resource_type_id= 0; -DROP INDEX metadatavalue_item_idx; -DROP INDEX metadatavalue_item_idx2; -ALTER TABLE metadatavalue DROP COLUMN resource_id; -ALTER TABLE metadatavalue DROP COLUMN resource_type_id; -UPDATE MetadataValue SET confidence = -1 WHERE confidence IS NULL; -UPDATE metadatavalue SET place = -1 WHERE place IS NULL; - --- Alter harvested item -ALTER TABLE harvested_item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE harvested_item ADD item_id RAW(16) REFERENCES item(uuid); -CREATE INDEX harvested_item_item on harvested_item(item_id); -UPDATE harvested_item SET item_id = (SELECT item.uuid FROM item WHERE harvested_item.item_legacy_id = item.item_id); -ALTER TABLE harvested_item DROP COLUMN item_legacy_id; - --- Alter harvested collection -ALTER TABLE harvested_collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE harvested_collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX harvested_collection_collectio on harvested_collection(collection_id); -UPDATE harvested_collection SET collection_id = (SELECT collection.uuid FROM collection WHERE harvested_collection.collection_legacy_id = 
collection.collection_id); -ALTER TABLE harvested_collection DROP COLUMN collection_legacy_id; - -UPDATE harvested_collection SET harvest_type = -1 WHERE harvest_type IS NULL; -UPDATE harvested_collection SET harvest_status = -1 WHERE harvest_status IS NULL; - - ---Alter workspaceitem -ALTER TABLE workspaceitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workspaceitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX workspaceitem_item on workspaceitem(item_id); -UPDATE workspaceitem SET item_id = (SELECT item.uuid FROM item WHERE workspaceitem.item_legacy_id = item.item_id); -ALTER TABLE workspaceitem DROP COLUMN item_legacy_id; - -ALTER TABLE workspaceitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workspaceitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX workspaceitem_coll on workspaceitem(collection_id); -UPDATE workspaceitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workspaceitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workspaceitem DROP COLUMN collection_legacy_id; - -UPDATE workspaceitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workspaceitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workspaceitem SET multiple_files = '0' WHERE multiple_files IS NULL; -UPDATE workspaceitem SET stage_reached = -1 WHERE stage_reached IS NULL; -UPDATE workspaceitem SET page_reached = -1 WHERE page_reached IS NULL; - ---Alter epersongroup2workspaceitem -ALTER TABLE epersongroup2workspaceitem RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE epersongroup2workspaceitem ADD eperson_group_id RAW(16) REFERENCES epersongroup(uuid); -CREATE INDEX epersongroup2workspaceitem_gro on epersongroup2workspaceitem(eperson_group_id); -UPDATE epersongroup2workspaceitem SET eperson_group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE epersongroup2workspaceitem.eperson_group_legacy_id = 
epersongroup.eperson_group_id); -ALTER TABLE epersongroup2workspaceitem DROP COLUMN eperson_group_legacy_id; - -ALTER TABLE epersongroup2workspaceitem DROP COLUMN id; -ALTER TABLE epersongroup2workspaceitem MODIFY workspace_item_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem MODIFY eperson_group_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem add CONSTRAINT epersongroup2wsitem_unqiue primary key (workspace_item_id,eperson_group_id); - ---Alter most_recent_checksum -ALTER TABLE most_recent_checksum RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE most_recent_checksum ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX most_recent_checksum_bitstream on most_recent_checksum(bitstream_id); -UPDATE most_recent_checksum SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE most_recent_checksum.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE most_recent_checksum DROP COLUMN bitstream_legacy_id; - -UPDATE most_recent_checksum SET to_be_processed = '0' WHERE to_be_processed IS NULL; -UPDATE most_recent_checksum SET matched_prev_checksum = '0' WHERE matched_prev_checksum IS NULL; - ---Alter checksum_history -ALTER TABLE checksum_history RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE checksum_history ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX checksum_history_bitstream on checksum_history(bitstream_id); -UPDATE checksum_history SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE checksum_history.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE checksum_history DROP COLUMN bitstream_legacy_id; - -RENAME checksum_history_seq TO checksum_history_check_id_seq; - ---Alter table doi -ALTER TABLE doi ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX doi_object on doi(dspace_object); -UPDATE doi SET dspace_object = (SELECT community.uuid FROM community WHERE doi.resource_id = community.community_id AND doi.resource_type_id = 4) WHERE 
doi.resource_type_id = 4; -UPDATE doi SET dspace_object = (SELECT collection.uuid FROM collection WHERE doi.resource_id = collection.collection_id AND doi.resource_type_id = 3) WHERE doi.resource_type_id = 3; -UPDATE doi SET dspace_object = (SELECT item.uuid FROM item WHERE doi.resource_id = item.item_id AND doi.resource_type_id = 2) WHERE doi.resource_type_id = 2; -UPDATE doi SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE doi.resource_id = bundle.bundle_id AND doi.resource_type_id = 1) WHERE doi.resource_type_id = 1; -UPDATE doi SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE doi.resource_id = bitstream.bitstream_id AND doi.resource_type_id = 0) WHERE doi.resource_type_id = 0; - ---Update table bitstreamformatregistry -UPDATE bitstreamformatregistry SET support_level = -1 WHERE support_level IS NULL; - ---Update table requestitem -UPDATE requestitem SET allfiles = '0' WHERE allfiles IS NULL; -UPDATE requestitem SET accept_request = '0' WHERE accept_request IS NULL; - ---Update table webapp -UPDATE webapp SET isui = -1 WHERE isui IS NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql deleted file mode 100644 index 8f1a7ad157a2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS_3378 Lost oracle indexes ------------------------------------------------------- -CREATE UNIQUE INDEX eperson_eperson on 
eperson(eperson_id); -CREATE UNIQUE INDEX epersongroup_eperson_group on epersongroup(eperson_group_id); -CREATE UNIQUE INDEX community_community on community(community_id); -CREATE UNIQUE INDEX collection_collection on collection(collection_id); -CREATE UNIQUE INDEX item_item on item(item_id); -CREATE UNIQUE INDEX bundle_bundle on bundle(bundle_id); -CREATE UNIQUE INDEX bitstream_bitstream on bitstream(bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql deleted file mode 100644 index 8ad6f7fcd247..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3024 Invent "permanent" groups ------------------------------------------------------- - -ALTER TABLE epersongroup - ADD (permanent NUMBER(1) DEFAULT 0); -UPDATE epersongroup SET permanent = 1 - WHERE uuid IN ( - SELECT dspace_object_id - FROM metadataschemaregistry s - JOIN metadatafieldregistry f USING (metadata_schema_id) - JOIN metadatavalue v USING (metadata_field_id) - WHERE s.short_id = 'dc' - AND f.element = 'title' - AND f.qualifier IS NULL - AND dbms_lob.compare(v.text_value, 'Administrator') = 0 OR dbms_lob.compare(v.text_value,'Anonymous') = 0 - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql deleted file mode 100644 index 
18cb4a50841d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3024 extremely slow searching when logged in as admin ---------------------------------------------------------------- --- This script will put the group name on the epersongroup --- record itself for performance reasons. It will also make --- sure that a group name is unique (so that for example no two --- Administrator groups can be created). ---------------------------------------------------------------- - -ALTER TABLE epersongroup -ADD name VARCHAR2(250); - -CREATE UNIQUE INDEX epersongroup_unique_idx_name on epersongroup(name); - -UPDATE epersongroup -SET name = -(SELECT text_value - FROM metadatavalue v - JOIN metadatafieldregistry field on v.metadata_field_id = field.metadata_field_id - JOIN metadataschemaregistry s ON field.metadata_schema_id = s.metadata_schema_id - WHERE s.short_id = 'dc' AND element = 'title' AND qualifier IS NULL - AND v.dspace_object_id = epersongroup.uuid); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql deleted file mode 100644 index e0a103749c2b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed 
in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1955 resize rpdescription for embargo reason ------------------------------------------------------- - --- We cannot alter type between varchar2 & clob directly so an in between column is required -ALTER TABLE resourcepolicy ADD rpdescription_clob CLOB; -UPDATE resourcepolicy SET rpdescription_clob=rpdescription, rpdescription=null; -ALTER TABLE resourcepolicy DROP COLUMN rpdescription; -ALTER TABLE resourcepolicy RENAME COLUMN rpdescription_clob TO rpdescription; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql deleted file mode 100644 index 7b13d10b6d4f..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql +++ /dev/null @@ -1,46 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3086 OAI Harvesting performance ---------------------------------------------------------------- --- This script will create indexes on the key fields of 
the --- metadataschemaregistry and metadatafieldregistry tables to --- increase the performance of the queries. It will also add --- "ON DELETE CASCADE" to improve the performance of Item deletion. ---------------------------------------------------------------- - -CREATE UNIQUE INDEX metadataschema_idx_short_id on metadataschemaregistry(short_id); - -CREATE INDEX metadatafield_idx_elem_qual on metadatafieldregistry(element, qualifier); - -CREATE INDEX resourcepolicy_idx_rptype on resourcepolicy(rptype); - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE RESOURCEPOLICY ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE RESOURCEPOLICY SET DSPACE_OBJECT_NEW = DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY DROP COLUMN DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT; - -ALTER TABLE RESOURCEPOLICY -ADD CONSTRAINT RESOURCEPOLICY_DSPACE_OBJ_FK -FOREIGN KEY (DSPACE_OBJECT) -REFERENCES dspaceobject(uuid) -ON DELETE CASCADE; - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE METADATAVALUE ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE METADATAVALUE SET DSPACE_OBJECT_NEW = DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE DROP COLUMN DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT_ID; - -ALTER TABLE METADATAVALUE -ADD CONSTRAINT METADATAVALUE_DSPACE_OBJECT_FK -FOREIGN KEY (DSPACE_OBJECT_ID) -REFERENCES DSPACEOBJECT(UUID) -ON DELETE CASCADE; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql deleted file mode 100644 index a1b303f0365a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ 
/dev/null @@ -1,33 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3125 Submitters cannot delete bistreams of workspaceitems ---------------------------------------------------------------- --- This script will add delete rights on all bundles/bitstreams --- for people who already have REMOVE rights. --- In previous versions REMOVE rights was enough to ensure that --- you could delete an object. ---------------------------------------------------------------- -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, -rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) -SELECT -resourcepolicy_seq.nextval AS policy_id, -resource_type_id, -resource_id, --- Insert the Constants.DELETE action -2 AS action_id, -start_date, -end_date, -rpname, -rptype, -rpdescription, -eperson_id, -epersongroup_id, -dspace_object -FROM resourcepolicy WHERE action_id=4 AND (resource_type_id=0 OR resource_type_id=1 OR resource_type_id=2); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql deleted file mode 100644 index 2ba3517e1988..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- 
http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3168 Embargo request Unknown Entity RequestItem ---------------------------------------------------------------- --- convert the item_id and bitstream_id columns from integer to UUID ---------------------------------------------------------------- -ALTER TABLE requestitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE requestitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX requestitem_item on requestitem(item_id); -UPDATE requestitem SET item_id = (SELECT item.uuid FROM item WHERE requestitem.item_legacy_id = item.item_id); -ALTER TABLE requestitem DROP COLUMN item_legacy_id; - -ALTER TABLE requestitem RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE requestitem ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX requestitem_bitstream on requestitem(bitstream_id); -UPDATE requestitem SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE requestitem.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE requestitem DROP COLUMN bitstream_legacy_id; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql deleted file mode 100644 index 74783974468c..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2775 Drop unused sequences ------------------------------------------------------- - -DROP SEQUENCE bitstream_seq; -DROP 
SEQUENCE bundle2bitstream_seq; -DROP SEQUENCE bundle_seq; -DROP SEQUENCE collection2item_seq; -DROP SEQUENCE collection_seq; -DROP SEQUENCE community2collection_seq; -DROP SEQUENCE community2community_seq; -DROP SEQUENCE community_seq; -DROP SEQUENCE dcvalue_seq; -DROP SEQUENCE eperson_seq; -DROP SEQUENCE epersongroup2eperson_seq; -DROP SEQUENCE epersongroup2workspaceitem_seq; -DROP SEQUENCE epersongroup_seq; -DROP SEQUENCE group2group_seq; -DROP SEQUENCE group2groupcache_seq; -DROP SEQUENCE historystate_seq; -DROP SEQUENCE item2bundle_seq; -DROP SEQUENCE item_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql deleted file mode 100644 index 96f125f78b61..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql +++ /dev/null @@ -1,44 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------------------------------------- --- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles --- can be minted from 'handle_seq' ----------------------------------------------------------------------------------- --- Create a new sequence for 'handle_id' column. --- The role of this sequence is to simply provide a unique internal ID to the database. 
-CREATE SEQUENCE handle_id_seq; --- Initialize new 'handle_id_seq' to the maximum value of 'handle_id' -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(handle_id) INTO curr FROM handle; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_id_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_id_seq START WITH ' || NVL(curr,1); -END; -/ - --- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column, --- as this sequence is used to mint new Handles. --- Code borrowed from update-sequences.sql and updateseq.sql -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999')) INTO curr FROM handle WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$'); - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_seq START WITH ' || NVL(curr,1); -END; -/ \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql deleted file mode 100644 index e1220c8c7cce..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id 
= item.uuid - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql deleted file mode 100644 index 5c3c3842aaea..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3410 ---------------------------------------------------------------- --- This script will create lost indexes ---------------------------------------------------------------- - -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql deleted file mode 100644 index 47b2d18be8a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql +++ /dev/null @@ -1,16 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed 
in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 Handle of collections and communities are lost due to bug at V6.0_2015.03.07__DS-2701_Hibernate_migration.sql ------------------------------------------------------- - -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4) where handle.resource_type_id = 4; -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3) where handle.resource_type_id = 3; -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2) where handle.resource_type_id = 2; - \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql deleted file mode 100644 index fc1c0b2e2319..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql +++ /dev/null @@ -1,65 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the dspace 7 entities usage -------------------------------------------------------------- -CREATE SEQUENCE entity_type_id_seq; -CREATE SEQUENCE relationship_type_id_seq; -CREATE SEQUENCE relationship_id_seq; - -CREATE TABLE entity_type -( - id INTEGER NOT NULL PRIMARY KEY, - label varchar(32) UNIQUE NOT NULL -); - -CREATE TABLE relationship_type -( - id INTEGER NOT NULL PRIMARY KEY, - left_type INTEGER NOT NULL, - right_type INTEGER NOT NULL, - left_label varchar(32) NOT NULL, - right_label varchar(32) NOT NULL, - left_min_cardinality INTEGER, - left_max_cardinality INTEGER, - right_min_cardinality INTEGER, - right_max_cardinality INTEGER, - FOREIGN KEY (left_type) REFERENCES entity_type(id), - FOREIGN KEY (right_type) REFERENCES entity_type(id), - CONSTRAINT u_relationship_type_constraint UNIQUE (left_type, right_type, left_label, right_label) - -); - -CREATE TABLE relationship -( - id INTEGER NOT NULL PRIMARY KEY, - left_id raw(16) NOT NULL REFERENCES item(uuid), - type_id INTEGER NOT NULL REFERENCES relationship_type(id), - right_id raw(16) NOT NULL REFERENCES item(uuid), - left_place INTEGER, - right_place INTEGER, - CONSTRAINT u_constraint UNIQUE (left_id, type_id, right_id) - -); - -CREATE INDEX entity_type_label_idx ON entity_type(label); -CREATE INDEX rl_ty_by_left_type_idx ON relationship_type(left_type); -CREATE INDEX rl_ty_by_right_type_idx ON relationship_type(right_type); -CREATE INDEX rl_ty_by_left_label_idx ON relationship_type(left_label); -CREATE INDEX rl_ty_by_right_label_idx ON relationship_type(right_label); -CREATE INDEX relationship_by_left_id_idx ON relationship(left_id); -CREATE INDEX relationship_by_right_id_idx ON relationship(right_id); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql deleted file mode 100644 index 68ed690f89e8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ----------------------------------------------------------------------------------------------------------------- --- This adds TYPE_INHERITED to all old archived items permission due to the change on resource policy management ----------------------------------------------------------------------------------------------------------------- -UPDATE resourcepolicy set rptype = 'TYPE_INHERITED' - where resource_type_id = 2 and rptype is null - and dspace_object in ( - select uuid from item where in_archive = 1 - ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql deleted file mode 100644 index b23170f43732..000000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-4239 Migrate the workflow.xml to spring ---------------------------------------------------------------- --- This script will rename the default workflow "default" name --- to the new "defaultWorkflow" identifier ---------------------------------------------------------------- - -UPDATE cwf_pooltask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; -UPDATE cwf_claimtask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql deleted file mode 100644 index cebae09f651c..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns leftwardValue and rightwardValue in table relationship --- Rename columns left_label and right_label to leftward_type and rightward_type ------------------------------------------------------------------------------------ - -ALTER TABLE relationship ADD 
leftward_value VARCHAR2(50); -ALTER TABLE relationship ADD rightward_value VARCHAR2(50); - -ALTER TABLE relationship_type RENAME COLUMN left_label TO leftward_type; -ALTER TABLE relationship_type RENAME COLUMN right_label TO rightward_type; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql deleted file mode 100644 index a7015e3033bf..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql +++ /dev/null @@ -1,40 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== -CREATE SEQUENCE process_id_seq; - -CREATE TABLE process -( - process_id INTEGER NOT NULL PRIMARY KEY, - user_id RAW(16) NOT NULL, - start_time TIMESTAMP, - finished_time TIMESTAMP, - creation_time TIMESTAMP NOT NULL, - script VARCHAR(256) NOT NULL, - status VARCHAR(32), - parameters VARCHAR(512) -); - -CREATE TABLE process2bitstream -( - process_id INTEGER REFERENCES process(process_id), - bitstream_id RAW(16) REFERENCES bitstream(uuid), - CONSTRAINT PK_process2bitstream PRIMARY KEY (process_id, bitstream_id) -); - -CREATE INDEX process_user_id_idx ON process(user_id); -CREATE INDEX process_status_idx ON process(status); -CREATE INDEX process_name_idx on process(script); -CREATE INDEX process_start_time_idx on process(start_time); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql deleted file mode 100644 index a108fd74b468..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the IRUS statistics harvester -------------------------------------------------------------- - -CREATE SEQUENCE openurltracker_seq; - -CREATE TABLE openurltracker -( - tracker_id NUMBER, - tracker_url VARCHAR2(1000), - uploaddate DATE, - CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql deleted file mode 100644 index 9c39091f89dc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql +++ /dev/null @@ -1,56 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- --- Move all 'relationship.type' metadata fields to 'dspace.entity.type'. Remove 'relationship' schema. -------------------------------------------------------------------------------------------------------- --- Special case: we need to the 'dspace' schema to already exist. 
If users don't already have it we must create it --- manually via SQL, as by default it won't be created until database updates are finished. -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) - SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/dspace' as namespace, 'dspace' as short_id FROM dual - WHERE NOT EXISTS - (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry - WHERE namespace = 'http://dspace.org/dspace' AND short_id = 'dspace'); - - --- Add 'dspace.entity.type' field to registry (if missing) -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace'), 'entity', 'type' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entitye' AND qualifier='type'); - --- Moves all 'relationship.type' field values to a new 'dspace.entity.type' field -UPDATE metadatavalue - SET metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entity' AND qualifier='type') - WHERE metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='relationship') - AND element = 'type' AND qualifier is NULL); - - --- Delete 'relationship.type' field from registry -DELETE FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id = 'relationship') - AND element = 'type' AND qualifier is NULL; - --- Delete 'relationship' schema (which is now empty) -DELETE FROM 
metadataschemaregistry WHERE short_id = 'relationship' AND namespace = 'http://dspace.org/relationship'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql deleted file mode 100644 index 5a6abda04101..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql +++ /dev/null @@ -1,28 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------- -UPDATE metadatavalue SET dspace_object_id = (SELECT uuid - FROM collection - WHERE template_item_id = dspace_object_id) -WHERE dspace_object_id IN (SELECT template_item_id - FROM Collection) - AND metadata_field_id - IN (SELECT metadata_field_id - FROM metadatafieldregistry mfr LEFT JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE msr.short_id = 'dspace' AND mfr.element = 'entity' AND mfr.qualifier = 'type'); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql deleted file mode 100644 index ae8f1e7ef5d2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql +++ /dev/null @@ -1,15 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------------- ----- ALTER table collection -------------------------------------------------------------------------------------- - -ALTER TABLE collection DROP COLUMN workflow_step_1; -ALTER TABLE collection DROP COLUMN workflow_step_2; -ALTER TABLE collection DROP COLUMN workflow_step_3; \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql deleted file mode 100644 index 9c39c15e66e2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Make sure the metadatavalue.place column starts at 0 instead of 1 ----------------------------------------------------- -MERGE INTO metadatavalue mdv -USING ( - SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace - FROM metadatavalue - GROUP BY dspace_object_id, metadata_field_id -) mp -ON ( - mdv.dspace_object_id = mp.dspace_object_id - AND mdv.metadata_field_id = mp.metadata_field_id - AND mp.minplace > 0 -) -WHEN MATCHED THEN UPDATE -SET mdv.place = mdv.place - mp.minplace; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql deleted file mode 100644 index 14bf8531439f..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- 
- ------------------------------------------------------------------------------------ --- Create table for ORCID access tokens ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_token_id_seq; - -CREATE TABLE orcid_token -( - id INTEGER NOT NULL, - eperson_id RAW(16) NOT NULL UNIQUE, - profile_item_id RAW(16), - access_token VARCHAR2(100) NOT NULL, - CONSTRAINT orcid_token_pkey PRIMARY KEY (id), - CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), - CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) -); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql similarity index 60% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql index f96cddbe7fd4..5ebd41a866d0 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql @@ -13,11 +13,3 @@ -- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
-- http://flywaydb.org/ -- =============================================================== - -------------------------------------------- --- New column for bitstream order DS-749 -- -------------------------------------------- -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; - ---Place the sequence id's in the order -UPDATE bundle2bitstream SET bitstream_order=(SELECT sequence_id FROM bitstream WHERE bitstream.bitstream_id=bundle2bitstream.bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql deleted file mode 100644 index 3eb9ae6dd4f8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) -ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql deleted file mode 100644 index 3862830230e3..000000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql +++ /dev/null @@ -1,45 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ - - -CREATE SEQUENCE if NOT EXISTS subscription_parameter_seq; ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ -CREATE TABLE if NOT EXISTS subscription_parameter -( - subscription_parameter_id INTEGER NOT NULL, - name VARCHAR(255), - value VARCHAR(255), - subscription_id INTEGER NOT NULL, - CONSTRAINT subscription_parameter_pkey PRIMARY KEY (subscription_parameter_id), - CONSTRAINT subscription_parameter_subscription_fkey FOREIGN KEY (subscription_id) - REFERENCES subscription (subscription_id) ON DELETE CASCADE -); --- -- - -ALTER TABLE subscription ADD COLUMN if NOT EXISTS dspace_object_id UUID; ----- -- -ALTER TABLE subscription ADD COLUMN if NOT EXISTS type CHARACTER VARYING(255); --- -UPDATE subscription SET dspace_object_id = collection_id , type = 'content'; --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_dspaceobject_fkey; -ALTER TABLE subscription ADD CONSTRAINT subscription_dspaceobject_fkey FOREIGN KEY (dspace_object_id) REFERENCES dspaceobject (uuid); --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_collection_id_fkey; ----- -- -ALTER TABLE subscription DROP COLUMN IF EXISTS collection_id; --- -- -INSERT INTO subscription_parameter 
(subscription_parameter_id, name, value, subscription_id) -SELECT getnextid('subscription_parameter'), 'frequency', 'D', subscription_id from "subscription" ; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql deleted file mode 100644 index c7bb0b502ec2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql +++ /dev/null @@ -1,78 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store supervision orders -------------------------------------------------------------------------------- - -CREATE TABLE supervision_orders -( - id INTEGER PRIMARY KEY, - item_id UUID REFERENCES Item(uuid) ON DELETE CASCADE, - eperson_group_id UUID REFERENCES epersongroup(uuid) ON DELETE CASCADE -); - -CREATE SEQUENCE supervision_orders_seq; - -INSERT INTO supervision_orders (id, item_id, eperson_group_id) -SELECT supervision_orders_seq.nextval AS id, w.item_id, e.uuid -FROM epersongroup2workspaceitem ew INNER JOIN workspaceitem w -ON ew.workspace_item_id = w.workspace_item_id -INNER JOIN epersongroup e -ON ew.eperson_group_id = e.uuid; - - --- UPDATE policies for supervision orders --- items, bundles and bitstreams - -DECLARE -BEGIN - -FOR rec IN -( -SELECT so.item_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN RESOURCEPOLICY rp on so.item_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT ib.bundle_id as 
dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN RESOURCEPOLICY rp on ib.bundle_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT bs.bitstream_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN bundle2bitstream bs ON ib.bundle_id = bs.bundle_id -INNER JOIN RESOURCEPOLICY rp on bs.bitstream_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL -) - -LOOP - -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_SUBMISSION' -where dspace_object = rec.dspace_object -AND epersongroup_id = rec.eperson_group_id -AND rptype IS NULL; - -END LOOP; -END; - -------------------------------------------------------------------------------- --- drop epersongroup2workspaceitem table -------------------------------------------------------------------------------- - -DROP TABLE epersongroup2workspaceitem; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..6b2dd705ea68 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process MODIFY (parameters CLOB); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.23__add_custom_filter.sql similarity index 71% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.23__add_custom_filter.sql index f4b2737fb3a8..369bd14f7064 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.23__add_custom_filter.sql @@ -14,7 +14,4 @@ -- http://flywaydb.org/ -- =============================================================== ------------------------------------------------------------------- --- Remove unused / obsolete sequence 'dctyperegistry_seq' (DS-729) ------------------------------------------------------------------- -DROP SEQUENCE dctyperegistry_seq; +ALTER TABLE cris_layout_tab ADD IF NOT EXISTS custom_filter varchar(255); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql new file mode 100644 index 000000000000..6ea435bfeed2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- -- +-- Remove unique constraint on 
entity_id and shortname of table cris_layout_tab. +-- Now the entity_id and shortname aren't unique because entity_type can have custom_filter in it +-- -- +ALTER TABLE cris_layout_tab DROP CONSTRAINT cris_layout_tab_entity_shortname_unique; + +-- -- +-- +-- -- +ALTER TABLE cris_layout_tab ADD CONSTRAINT cris_layout_tab_entity_shortname_custom_filter_unique UNIQUE(entity_id, shortname, custom_filter); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql new file mode 100644 index 000000000000..6ae50fb29bf6 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_tab2securitygroup ADD alternative_tab_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_tab2securitygroup ADD COLUMN alternative_tab_id INTEGER; +ALTER TABLE cris_layout_tab2securitygroup ADD CONSTRAINT cris_layout_tab2securitygroup_tab_id2 FOREIGN KEY (alternative_tab_id) REFERENCES cris_layout_tab (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql new file mode 100644 index 000000000000..38360bb13cd8 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_box2securitygroup ADD alternative_box_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_box2securitygroup ADD COLUMN alternative_box_id INTEGER; +ALTER TABLE cris_layout_box2securitygroup ADD CONSTRAINT cris_layout_box2securitygroup_box_id2 FOREIGN KEY (alternative_box_id) REFERENCES cris_layout_box (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql deleted file mode 100644 index b4d4d755cbe7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql +++ /dev/null @@ -1,77 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- SQL code to update the ID (primary key) generating sequences, if some --- import operation has set explicit IDs. --- --- Sequences are used to generate IDs for new rows in the database. 
If a --- bulk import operation, such as an SQL dump, specifies primary keys for --- imported data explicitly, the sequences are out of sync and need updating. --- This SQL code does just that. --- --- This should rarely be needed; any bulk import should be performed using the --- org.dspace.content API which is safe to use concurrently and in multiple --- JVMs. The SQL code below will typically only be required after a direct --- SQL data dump from a backup or somesuch. - --- The 'updateseq' procedure was derived from incseq.sql found at: --- http://www.akadia.com/services/scripts/incseq.sql - -DECLARE - PROCEDURE updateseq ( seq IN VARCHAR, - tbl IN VARCHAR, - attr IN VARCHAR, - cond IN VARCHAR DEFAULT '' ) IS - curr NUMBER := 0; - BEGIN - EXECUTE IMMEDIATE 'SELECT max(' || attr - || ') FROM ' || tbl - || ' ' || cond - INTO curr; - curr := curr + 1; - EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq; - EXECUTE IMMEDIATE 'CREATE SEQUENCE ' - || seq - || ' START WITH ' - || NVL(curr, 1); - END updateseq; - -BEGIN - updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry', - 'bitstream_format_id'); - updateseq('fileextension_seq', 'fileextension', 'file_extension_id'); - updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id'); - updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id'); - updateseq('registrationdata_seq', 'registrationdata', - 'registrationdata_id'); - updateseq('subscription_seq', 'subscription', 'subscription_id'); - updateseq('metadatafieldregistry_seq', 'metadatafieldregistry', - 'metadata_field_id'); - updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id'); - updateseq('metadataschemaregistry_seq', 'metadataschemaregistry', - 'metadata_schema_id'); - updateseq('harvested_collection_seq', 'harvested_collection', 'id'); - updateseq('harvested_item_seq', 'harvested_item', 'id'); - updateseq('webapp_seq', 'webapp', 'webapp_id'); - updateseq('requestitem_seq', 'requestitem', 'requestitem_id'); - 
updateseq('handle_id_seq', 'handle', 'handle_id'); - - -- Handle Sequence is a special case. Since Handles minted by DSpace - -- use the 'handle_seq', we need to ensure the next assigned handle - -- will *always* be unique. So, 'handle_seq' always needs to be set - -- to the value of the *largest* handle suffix. That way when the - -- next handle is assigned, it will use the next largest number. This - -- query does the following: - -- For all 'handle' values which have a number in their suffix - -- (after '/'), find the maximum suffix value, convert it to a - -- number, and set the 'handle_seq' to start at the next value (see - -- updateseq above for more). - updateseq('handle_seq', 'handle', - q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}', - q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}'); -END; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md index 72eb279912b5..e16e4c6d4c91 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md @@ -3,8 +3,9 @@ The SQL scripts in this directory are PostgreSQL-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. 
@@ -22,7 +23,7 @@ Please see the Flyway Documentation for more information: http://flywaydb.org/ The `update-sequences.sql` script in this directory may still be used to update your internal database counts if you feel they have gotten out of "sync". This may sometimes occur after large restores of content (e.g. when using the DSpace -[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore) +[AIP Backup and Restore](https://wiki.lyrasis.org/display/DSDOC7x/AIP+Backup+and+Restore) feature). This `update-sequences.sql` script can be executed by running diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2023.09.22__registration_data.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2023.09.22__registration_data.sql new file mode 100644 index 000000000000..91bc76e7e887 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2023.09.22__registration_data.sql @@ -0,0 +1,64 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- ALTER table registrationdata +----------------------------------------------------------------------------------- + +DO $$ + BEGIN + if exists (select constraint_name + from information_schema.constraint_column_usage + where TABLE_SCHEMA = 'public' AND TABLE_NAME = 'registrationdata' AND COLUMN_NAME = 'email') then + EXECUTE 'ALTER TABLE registrationdata DROP CONSTRAINT ' || + QUOTE_IDENT(( + SELECT CONSTRAINT_NAME + FROM information_schema.key_column_usage + WHERE TABLE_SCHEMA = 'public' AND TABLE_NAME = 'registrationdata' AND COLUMN_NAME = 'email' + )); + end if; + end +$$; + +ALTER TABLE registrationdata +ADD COLUMN IF NOT EXISTS 
registration_type VARCHAR(255); + +ALTER TABLE registrationdata +ADD COLUMN IF NOT EXISTS net_id VARCHAR(64); + +CREATE SEQUENCE IF NOT EXISTS registrationdata_metadatavalue_seq START WITH 1 INCREMENT BY 1; + +----------------------------------------------------------------------------------- +-- Creates table registrationdata_metadata +----------------------------------------------------------------------------------- +DO $$ + BEGIN + IF NOT EXISTS (SELECT FROM pg_catalog.pg_tables + WHERE schemaname = 'public' + AND tablename = 'registrationdata_metadata') THEN + CREATE TABLE registrationdata_metadata ( + registrationdata_metadata_id INTEGER NOT NULL, + registrationdata_id INTEGER, + metadata_field_id INTEGER, + text_value TEXT, + CONSTRAINT pk_registrationdata_metadata PRIMARY KEY (registrationdata_metadata_id) + ); + + ALTER TABLE registrationdata_metadata + ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_METADATA_FIELD + FOREIGN KEY (metadata_field_id) + REFERENCES metadatafieldregistry (metadata_field_id) ON DELETE CASCADE; + + ALTER TABLE registrationdata_metadata + ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_REGISTRATIONDATA + FOREIGN KEY (registrationdata_id) + REFERENCES registrationdata (registrationdata_id) ON DELETE CASCADE; + + END IF; + END +$$; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql similarity index 65% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql index f71173abe607..e4544e1de729 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql +++ 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql @@ -7,11 +7,7 @@ -- ----------------------------------------------------------------------------------- --- Drop the 'workflowitem' and 'tasklistitem' tables +-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5) ----------------------------------------------------------------------------------- -DROP TABLE workflowitem CASCADE CONSTRAINTS; -DROP TABLE tasklistitem CASCADE CONSTRAINTS; - -DROP SEQUENCE workflowitem_seq; -DROP SEQUENCE tasklistitem_seq; \ No newline at end of file +DROP SEQUENCE IF EXISTS history_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql similarity index 57% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql index 9d13138fdada..8aec44a7f6f2 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -7,16 +7,11 @@ -- ----------------------------------------------------------------------------------- --- Create table for System wide alerts +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 ----------------------------------------------------------------------------------- -CREATE SEQUENCE alert_id_seq; - -CREATE TABLE systemwidealert -( - alert_id 
INTEGER NOT NULL PRIMARY KEY, - message VARCHAR(512), - allow_sessions VARCHAR(64), - countdown_to TIMESTAMP, - active BOOLEAN -); +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..ae0e414e4440 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description TYPE TEXT; +ALTER TABLE orcid_queue ALTER COLUMN description TYPE TEXT; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..f7e0e51d0bf7 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters TYPE TEXT; diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql new file mode 100644 index 000000000000..9dd2f54a43eb --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql @@ -0,0 +1,34 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +BEGIN; + +-- Unset any primary bitstream that is marked as deleted +UPDATE bundle +SET primary_bitstream_id = NULL +WHERE primary_bitstream_id IN + ( SELECT bs.uuid + FROM bitstream AS bs + INNER JOIN bundle as bl ON bs.uuid = bl.primary_bitstream_id + WHERE bs.deleted IS TRUE ); + +-- Unset any primary bitstream that don't belong to bundle's bitstream list +UPDATE bundle +SET primary_bitstream_id = NULL +WHERE primary_bitstream_id IN + ( SELECT bl.primary_bitstream_id + FROM bundle as bl + WHERE bl.primary_bitstream_id IS NOT NULL + AND bl.primary_bitstream_id NOT IN + ( SELECT bitstream_id + FROM bundle2bitstream AS b2b + WHERE b2b.bundle_id = bl.uuid + ) + ); + +COMMIT; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.23__add_custom_filter.sql similarity index 61% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.23__add_custom_filter.sql index 30cfae91c83a..369bd14f7064 100644 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.23__add_custom_filter.sql @@ -14,7 +14,4 @@ -- http://flywaydb.org/ -- =============================================================== ------------------------------------------------------------------------------------------------------------- --- This adds an extra column to the eperson table where we save a salt for stateless authentication ------------------------------------------------------------------------------------------------------------- -ALTER TABLE eperson ADD session_salt varchar(32); \ No newline at end of file +ALTER TABLE cris_layout_tab ADD IF NOT EXISTS custom_filter varchar(255); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql new file mode 100644 index 000000000000..6ea435bfeed2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- -- +-- Remove unique constraint on entity_id and shortname of table cris_layout_tab. 
+-- Now the entity_id and shortname aren't unique because entity_type can have custom_filter in it +-- -- +ALTER TABLE cris_layout_tab DROP CONSTRAINT cris_layout_tab_entity_shortname_unique; + +-- -- +-- +-- -- +ALTER TABLE cris_layout_tab ADD CONSTRAINT cris_layout_tab_entity_shortname_custom_filter_unique UNIQUE(entity_id, shortname, custom_filter); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.11.13__align_sequences_DBMSImportFramework2_with_current_ids.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.11.13__align_sequences_DBMSImportFramework2_with_current_ids.sql new file mode 100644 index 000000000000..9d5cb69e43fb --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.11.13__align_sequences_DBMSImportFramework2_with_current_ids.sql @@ -0,0 +1,30 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create sequences for DBMS Import framework +----------------------------------------------------------------------------------- +do $$ +begin + + SELECT pg_catalog.setval('imp_record_seq', (SELECT coalesce(MAX(imp_id),0) FROM imp_record)+1); + + SELECT pg_catalog.setval('imp_metadatavalue_seq', (SELECT coalesce(MAX(imp_metadatavalue_id),0) FROM imp_metadatavalue)+1); + + SELECT pg_catalog.setval('imp_bitstream_seq', (SELECT coalesce(MAX(imp_bitstream_id),0) FROM imp_bitstream)+1); + + SELECT pg_catalog.setval('imp_bitstream_metadatavalue_seq', (SELECT coalesce(MAX(imp_bitstream_metadatavalue_id),0) FROM imp_bitstream_metadatavalue)+1); + +exception when others then + + raise notice 'The transaction is in an uncommittable state. 
' + 'Transaction was rolled back'; + + raise notice 'Rollback --> % %', SQLERRM, SQLSTATE; +end; +$$ language 'plpgsql'; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql new file mode 100644 index 000000000000..6ae50fb29bf6 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_tab2securitygroup ADD alternative_tab_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_tab2securitygroup ADD COLUMN alternative_tab_id INTEGER; +ALTER TABLE cris_layout_tab2securitygroup ADD CONSTRAINT cris_layout_tab2securitygroup_tab_id2 FOREIGN KEY (alternative_tab_id) REFERENCES cris_layout_tab (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql new file mode 100644 index 000000000000..38360bb13cd8 --- /dev/null +++ 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_box2securitygroup ADD alternative_box_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_box2securitygroup ADD COLUMN alternative_box_id INTEGER; +ALTER TABLE cris_layout_box2securitygroup ADD CONSTRAINT cris_layout_box2securitygroup_box_id2 FOREIGN KEY (alternative_box_id) REFERENCES cris_layout_box (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql index 749f82382c9d..f96434f1ba8c 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql @@ -19,21 +19,41 @@ -- JVMs. The SQL code below will typically only be required after a direct -- SQL data dump from a backup or somesuch. 
- +SELECT setval('alert_id_seq', max(alert_id)) FROM systemwidealert; SELECT setval('bitstreamformatregistry_seq', max(bitstream_format_id)) FROM bitstreamformatregistry; +SELECT setval('checksum_history_check_id_seq', max(check_id)) FROM checksum_history; +SELECT setval('cwf_claimtask_seq', max(claimtask_id)) FROM cwf_claimtask; +SELECT setval('cwf_collectionrole_seq', max(collectionrole_id)) FROM cwf_collectionrole; +SELECT setval('cwf_in_progress_user_seq', max(in_progress_user_id)) FROM cwf_in_progress_user; +SELECT setval('cwf_pooltask_seq', max(pooltask_id)) FROM cwf_pooltask; +SELECT setval('cwf_workflowitem_seq', max(workflowitem_id)) FROM cwf_workflowitem; +SELECT setval('cwf_workflowitemrole_seq', max(workflowitemrole_id)) FROM cwf_workflowitemrole; +SELECT setval('doi_seq', max(doi_id)) FROM doi; +SELECT setval('entity_type_id_seq', max(id)) FROM entity_type; SELECT setval('fileextension_seq', max(file_extension_id)) FROM fileextension; -SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; -SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata; -SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; -SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; -SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; -SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry; +SELECT setval('handle_id_seq', max(handle_id)) FROM handle; SELECT setval('harvested_collection_seq', max(id)) FROM harvested_collection; SELECT setval('harvested_item_seq', max(id)) FROM harvested_item; -SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; +SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM 
metadataschemaregistry; +SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; +SELECT setval('openurltracker_seq', max(tracker_id)) FROM openurltracker; +SELECT setval('orcid_history_id_seq', max(id)) FROM orcid_history; +SELECT setval('orcid_queue_id_seq', max(id)) FROM orcid_queue; +SELECT setval('orcid_token_id_seq', max(id)) FROM orcid_token; +SELECT setval('process_id_seq', max(process_id)) FROM process; +SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata; +SELECT setval('relationship_id_seq', max(id)) FROM relationship; +SELECT setval('relationship_type_id_seq', max(id)) FROM relationship_type; SELECT setval('requestitem_seq', max(requestitem_id)) FROM requestitem; -SELECT setval('handle_id_seq', max(handle_id)) FROM handle; +SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; +SELECT setval('subscription_parameter_seq', max(subscription_id)) FROM subscription_parameter; +SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; +SELECT setval('supervision_orders_seq', max(id)) FROM supervision_orders; +SELECT setval('versionhistory_seq', max(versionhistory_id)) FROM versionhistory; +SELECT setval('versionitem_seq', max(versionitem_id)) FROM versionitem; +SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -- Handle Sequence is a special case. Since Handles minted by DSpace use the 'handle_seq', -- we need to ensure the next assigned handle will *always* be unique. 
So, 'handle_seq' diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql deleted file mode 100644 index 9bca3a17c99e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - 
collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND 
epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, 
resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - 
i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS 
action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - 
resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND 
mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN 
metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN 
bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql deleted file mode 100644 index 917078594cfa..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql +++ /dev/null @@ -1,37 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- --- Alter workflow item -ALTER TABLE workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE workflowitem SET item_id = (SELECT item.uuid FROM item WHERE workflowitem.item_legacy_id = item.item_id); -ALTER TABLE workflowitem DROP COLUMN 
item_legacy_id; - --- Migrate task list item -ALTER TABLE TasklistItem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE TasklistItem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -UPDATE TasklistItem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE TasklistItem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE TasklistItem DROP COLUMN eperson_legacy_id; - --- Migrate task workflow item -ALTER TABLE workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workflowitem DROP COLUMN collection_legacy_id; -ALTER TABLE workflowitem RENAME COLUMN owner to owner_legacy_id; -ALTER TABLE workflowitem ADD owner RAW(16) REFERENCES EPerson (uuid); -UPDATE workflowitem SET owner = (SELECT eperson.uuid FROM eperson WHERE workflowitem.owner_legacy_id = eperson.eperson_id); -ALTER TABLE workflowitem DROP COLUMN owner_legacy_id; -UPDATE workflowitem SET state = -1 WHERE state IS NULL; -UPDATE workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql deleted file mode 100644 index b3887a5af4d1..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and 
NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM 
resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ----------------------------------------------------------------------------------- --- grant 
read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS 
eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND dspace_object = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 
AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR 
wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND 
mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM 
workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, 
eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM 
resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql deleted file mode 100644 index 7a992836eea6..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql +++ /dev/null @@ -1,141 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -UPDATE collection SET workflow_step_1 = null; -UPDATE collection SET workflow_step_2 = null; -UPDATE collection SET workflow_step_3 = null; - --- cwf_workflowitem - -DROP INDEX cwf_workflowitem_coll_fk_idx; - -ALTER TABLE cwf_workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE cwf_workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE cwf_workflowitem SET item_id = (SELECT item.uuid FROM item WHERE cwf_workflowitem.item_legacy_id = item.item_id); -ALTER TABLE cwf_workflowitem DROP COLUMN item_legacy_id; - -ALTER TABLE cwf_workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_workflowitem DROP 
COLUMN collection_legacy_id; - -UPDATE cwf_workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE cwf_workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE cwf_workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - --- cwf_collectionrole - -ALTER TABLE cwf_collectionrole DROP CONSTRAINT cwf_collectionrole_unique; -DROP INDEX cwf_cr_coll_role_fk_idx; -DROP INDEX cwf_cr_coll_fk_idx; - -ALTER TABLE cwf_collectionrole RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_collectionrole ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_collectionrole SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_collectionrole.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_collectionrole DROP COLUMN collection_legacy_id; - -ALTER TABLE cwf_collectionrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_collectionrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_collectionrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_collectionrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_collectionrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - --- cwf_workflowitemrole - -ALTER TABLE cwf_workflowitemrole DROP CONSTRAINT cwf_workflowitemrole_unique; -DROP INDEX cwf_wfir_item_role_fk_idx; -DROP INDEX cwf_wfir_item_fk_idx; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_workflowitemrole SET group_id = (SELECT 
epersongroup.uuid FROM epersongroup WHERE cwf_workflowitemrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_workflowitemrole SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_workflowitemrole.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN eperson_legacy_id; - - -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - --- cwf_pooltask - -DROP INDEX cwf_pt_eperson_fk_idx; -DROP INDEX cwf_pt_workflow_eperson_fk_idx; - -ALTER TABLE cwf_pooltask RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_pooltask ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_pooltask SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_pooltask.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_pooltask DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_pooltask RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_pooltask ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_pooltask SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_pooltask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_pooltask DROP COLUMN eperson_legacy_id; - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - --- cwf_claimtask - -ALTER TABLE cwf_claimtask DROP CONSTRAINT cwf_claimtask_unique; -DROP INDEX cwf_ct_workflow_fk_idx; -DROP INDEX cwf_ct_workflow_eperson_fk_idx; -DROP INDEX 
cwf_ct_eperson_fk_idx; -DROP INDEX cwf_ct_wfs_fk_idx; -DROP INDEX cwf_ct_wfs_action_fk_idx; -DROP INDEX cwf_ct_wfs_action_e_fk_idx; - -ALTER TABLE cwf_claimtask RENAME COLUMN owner_id to eperson_legacy_id; -ALTER TABLE cwf_claimtask ADD owner_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_claimtask SET owner_id = (SELECT eperson.uuid FROM eperson WHERE cwf_claimtask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_claimtask DROP COLUMN eperson_legacy_id; - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - --- cwf_in_progress_user - -ALTER TABLE cwf_in_progress_user DROP CONSTRAINT cwf_in_progress_user_unique; -DROP INDEX cwf_ipu_workflow_fk_idx; -DROP INDEX cwf_ipu_eperson_fk_idx; - -ALTER TABLE cwf_in_progress_user RENAME COLUMN user_id to eperson_legacy_id; -ALTER TABLE cwf_in_progress_user ADD user_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_in_progress_user SET user_id = (SELECT eperson.uuid FROM eperson WHERE cwf_in_progress_user.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_in_progress_user DROP COLUMN eperson_legacy_id; -UPDATE cwf_in_progress_user SET finished = '0' WHERE finished IS NULL; - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); \ No 
newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql deleted file mode 100644 index 0402fc994887..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql +++ /dev/null @@ -1,27 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- UPDATE policies for claimtasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id JOIN item ON cwf_workflowitem.item_id = item.uuid) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT item2bundle.bundle_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT bundle2bitstream.bitstream_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON 
item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Create policies for pooled tasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql deleted file mode 100644 index f582f37c6931..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and 
copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS 
workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS 
step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE resource_type_id = 2 AND resource_id IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 1 AND resource_id IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 0 AND resource_id IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! 
--- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, 
epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id); - -INSERT INTO resourcepolicy (policy_id, 
resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql deleted file mode 100644 index 70eb419d8fbb..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - 
-INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 
'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! 
--- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, 
action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid); - -INSERT INTO 
resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql deleted file mode 100644 index 541af73dfe01..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow (for DSpace 6.0) --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id RAW(16) REFERENCES item(uuid) UNIQUE, - collection_id RAW(16) REFERENCES collection(uuid), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE 
cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id RAW(16) REFERENCES collection(uuid), -group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id RAW(16) REFERENCES eperson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id RAW(16) REFERENCES EPerson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id RAW(16) REFERENCES eperson(uuid) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, 
action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id RAW(16) REFERENCES eperson(uuid), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql deleted file mode 100644 index f8f0e564e824..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. 
That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES item(item_id) UNIQUE, - collection_id INTEGER REFERENCES collection(collection_id), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id integer REFERENCES collection(collection_id), -group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id integer REFERENCES eperson(eperson_id), - group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, 
workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id INTEGER REFERENCES EPerson(eperson_id), - group_id INTEGER REFERENCES epersongroup(eperson_group_id) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id integer REFERENCES eperson(eperson_id) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id integer REFERENCES eperson(eperson_id), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT 
cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index c00b82260ea4..697f8a53d9d9 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -65,6 +65,7 @@ + xml @@ -144,7 +145,7 @@ - + @@ -158,6 +159,12 @@ + + + + + + diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml deleted file mode 100644 index b9c11f8164d6..000000000000 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml index 14b66cca7c9c..77859506546c 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml @@ -16,15 +16,6 @@ http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> - - - - @@ -34,12 +25,6 @@ - - - - diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types.xml new file mode 100644 index 000000000000..ee726233cc78 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types.xml @@ -0,0 +1,239 @@ + + + + + + A resource consisting primarily of 
words for reading. Examples include books, letters, dissertations, poems, newspapers, articles, archives of mailing lists. Note that facsimiles or images of texts are still of the genre Text. [Source: http://purl.org/dc/dcmitype/Text] + + + + An annotation in the sense of a legal note is a legally explanatory comment on a decision handed down by a court or arbitral tribunal. [Source: DRIVER info:eu-repo definition] + + + + A list of the books and articles that have been used by someone when writing a particular book or article [Source: https://dictionary.cambridge.org/dictionary/english/bibliography] + + + + A piece of writing or other item of content published on a blog. [Source: https://www.lexico.com/definition/blog_post] + + + + A non-serial publication that is complete in one volume or a designated finite number of volumes. [Source: Adapted from http://purl.org/eprint/type/Book] + + + + A defined chapter or section of a book, usually with a separate title or number. [Source: http://purl.org/spar/fabio/BookChapter] + + + + + + All kind of digital resources contributed to a conference, like conference presentation (slides), conference report, conference lecture, abstracts, demonstrations. For conference papers, posters or proceedings the specific sub-concepts should be used. [COAR definition] + + + + A paper, typically the realization of a research paper reporting original research findings. Use this label when the paper is not published in a proceeding. [Source: Adapted from http://purl.org/spar/fabio/ConferencePaper] + + + + A display poster, typically containing text with illustrative figures and/or tables, usually reporting research results or proposing hypotheses, submitted for acceptance to and/or presented at a conference, seminar, symposium, workshop or similar event. Use this label when the poster is not published in a proceeding. 
[Source: http://purl.org/spar/fabio/ConferencePoster] + + + + A set of slides containing text, tables or figures, designed to communicate ideas or research results, for projection and viewing by an audience at a conference, symposium, seminar, lecture, workshop or other gatherings. [Source: Adapted from http://purl.org/spar/fabio/Presentation] + + + + Conference proceedings is the official record of a conference meeting. It is a collection of documents which corresponds to the presentations given at the conference. It may include additional content. [Source: http://www.ieee.org/documents/confprocdefined.pdf ] + + + + A paper, published within a conference proceeding, typically the realization of a research paper reporting original research findings. [Source: Adapted from http://purl.org/spar/fabio/ConferencePaper] + + + + A display poster, published within a conference proceeding, typically containing text with illustrative figures and/or tables, usually reporting research results or proposing hypotheses, submitted for acceptance to and/or presented at a conference, seminar, symposium, workshop or similar event. [Source: Adapted http://purl.org/spar/fabio/ConferencePoster] + + + + + + + + A journal is a serial publication devoted to disseminating original research and current developments on a subject. (Adapted from ODLIS) [Source: http://dspacecris.eurocris.org/cris/classcerif/classcerif00422] + + + + A brief essay expressing the opinion or position of the chief editor(s) of a (academic) journal with respect to a current political, social, cultural, or professional issue. [Source: Adapted from ODLIS [Source: http://www.abc-clio.com/ODLIS/odlis_e.aspx ] + + + + An article, typically the realization of a research paper reporting original research findings, published in a journal issue. [Source: http://purl.org/spar/fabio/JournalArticle] + + + + A formal correction to an error introduced by the author into a previously published document. 
(adapted from https://sparontologies.github.io/fabio/current/fabio.html#d4e2712) + + + + A data paper is a scholarly publication describing a particular dataset or group of dataset, published in the form of a peer-reviewed article in a scholarly journal. The main purpose of a data paper is to describe data, the circumstances of their collection, and information related to data features, access and potential reuse. Adapted from https://en.wikipedia.org/wiki/Data_paper and http://www.gbif.org/publishing-data/data-papers + + + + A research article is a primary source, that is, it reports the methods and results of an original study performed by the authors. (adapted from http://apus.libanswers.com/faq/2324) + + + + A review article is a secondary source, that is, it is written about other articles, and does not report original research of its own. [Source: Adapted from http://apus.libanswers.com/faq/2324] + + + + A software paper should include the rationale for the development of the tool and details of the code used for its construction. [Source: Adapted from https://f1000research.com/for-authors/article-guidelines/software-tool-articles ] + + + + + + A letter addressed to the editor and comments on or discussed an item previously published by that periodical, or of interest to its readership. [Source: Adapted from http://purl.org/spar/fabio/Letter] + + + + + + Transcription of an oral presentation/talk intended to present information or teach people about a particular subject, for example by a university or college teacher. [Source: Adopted from https://en.wikipedia.org/wiki/Lecture] + + + + A brief description of important new research, also known as “communication”. 
[Source: https://cerif.eurocris.org/vocab/html/OutputTypes.html#Letter] + + + + A popular interest periodical usually containing articles on a variety of topics, written by various authors in a nonscholarly style or a trade publication, unlike a consumer publication, covers a specific topic for people who work in that particular field or industry. [Source: Adapted from https://www.thebalance.com/what-is-a-trade-publication-exactly-2316039 and http://www.abc-clio.com/ODLIS/odlis_m.aspx] + + + + A manuscript is a work of any kind (text, inscription, music score, map, etc.) written entirely by hand. [Source: https://products.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Symbols used to write music, as in a music score, and to express mathematical concepts. +[Source: Adapted from https://products.abc-clio.com/ODLIS/odlis_n.aspx] + + + + A non-peer reviewed periodical, usually published daily or weekly, consisting primarily of editorials and news items concerning current or recent events and matters of public interest. [Source: http://purl.org/spar/fabio/Newspaper] + + + + Work consisting of a news item appearing in a general-interest newspaper or other general news periodical, containing information of current and timely interest in a field. (Adapted from http://www.reference.md/files/D018/mD018431.html ) + + + + + + A resource type that is not included in existing terms under the top concept "Text". [COAR definition] + + + + A preprint is a scientific manuscript without peer-review and has not yet been accepted by a journal, typicaly submitted to a public server/ repository by the author. [Source: Adapted from https://asapbio.org/preprint-info/preprint-faq#qaef-637] + + + + A report is a separately published record of research findings, research still in progress, policy developments and events, or other technical findings, usually bearing a report number and sometimes a grant number assigned by the funding agency. 
Also, an official record of the activities of a committee or corporate entity, the proceedings of a government body, or an investigation by an agency, whether published or private, usually archived or submitted to a higher authority, voluntarily or under mandate. In a more general sense, any formal account of facts or information related to a specific event or phenomenon, sometimes given at regular intervals. [Source: http://lu.com/odlis/odlis_R.cfm#report ] + + + + A work that reports on the results of a research study to evaluate interventions or exposures on biomedical or health-related outcomes. The two main types of clinical studies are interventional studies (clinical trials) and observational studies. While most clinical studies concern humans, this publication type may be used for clinical veterinary articles meeting the requisites for humans. [Source: https://www.ncbi.nlm.nih.gov/mesh/2009830] + + + + A formal statement describing how research data will be managed and documented throughout a research project and the terms regarding the subsequent deposit of the data with a data repository for long-term management and preservation. [Source: https://casrai.org/rdm-glossary] + + + + A formal note distributed internally to one or more persons in a company, agency, organization, or institution, with a header indicating the date it was sent and stating to whom it is addressed (To:), from whom it is sent (From:), and the subject of the text (Re:). Unlike a letter, a memo does not require a full salutation or signature at the end of the text--the sender may simply initial his or her name in the header. [Source: https://products.abc-clio.com/ODLIS/odlis_m.aspx#memorandum] + + + + A policy report presents what is known about a particular issue or problem. It assembles facts and evidence to help readers understand complex issues and form a response. It might aim to be neutral, or it might aim to persuade readers in a particular direction. 
[Source: https://www.uow.edu.au/student/learning-co-op/assessments/policy-report/#] + + + + A document containing a project report, intended to be delivered to a customer or funding agency describing the results achieved within a specific project. [Source: http://purl.org/spar/fabio/ProjectReportDocument] + + + + The protocol is a detailed plan of the research study including a project summary, project description covering the rationale, objectives, methodology, data management and analysis, ethical considerations, gender issues and references. [Source: Adapted from https://www.who.int/publications/i/item/a-practical-guide-for-health-researchers] + + + + It is publication that reports on the findings of a research project or alternatively scientific observations on or about a subject. [Source: Adapted from https://en.wikipedia.org/wiki/Research_report] + + + + A document that describes the process, progress, or results of technical or scientific research or the state of a technical or scientific research problem. It might also include recommendations and conclusions of the research. [Source: http://guides.library.cornell.edu/ecommons/types] + + + + + + A research proposal is a document proposing a research project, generally in the sciences or academia, and generally constitutes a request for sponsorship of that research. [Source: https://en.wikipedia.org/wiki/Research_proposal] + + + + A review of others' published work. [Source: Adapted from http://purl.org/spar/fabio/Review] + + + + A written review and critical analysis of the content, scope and quality of a book or other monographic work. [Source: http://purl.org/spar/fabio/BookReview] + + + + A commentary is a more in-depth analysis written to draw attention to a work already published. Commentaries are somewhat like “reviews” in that the author presents his or her analysis of a work and why it would be of interest to a specific audience. 
[Source: https://www.enago.com/academy/perspective-opinion-and-commentary-pieces] + + + + An evaluation of scientific, academic, or professional work by others working in the same field. [Source: Adopted from https://schema.datacite.org/meta/kernel-4.4/doc/DataCite-MetadataKernel_v4.4.pdf] + + + + + + Technical documentation refers to any type of documentation that describes handling, functionality and architecture of a technical product or a product under development or use. [Source: https://en.wikipedia.org/wiki/Technical_documentation] + + + + A book authored by a student containing a formal presentations of research outputs submitted for examination in completion of a course of study at an institution of higher education, to fulfil the requirements for an academic degree. Also know as a dissertation. [Source: http://purl.org/spar/fabio/Thesis] + + + + A thesis reporting a research project undertaken as part of an undergraduate course of education leading to a bachelor's degree. [Source: http://purl.org/spar/fabio/BachelorsThesis] + + + + A thesis reporting the research undertaken during a period of graduate study leading to a doctoral degree. [Source: http://purl.org/spar/fabio/DoctoralThesis] + + + + A thesis reporting a research project undertaken as part of a graduate course of education leading to a master's degree. [Source: http://purl.org/spar/fabio/MastersThesis] + + + + + + A written record of words spoken in court proceedings or in a speech, interview, broadcast, or sound recording. [Source: Adapted from https://products.abc-clio.com/ODLIS/odlis_t.aspx] + + + + A working or discussion paper circulated publicly or among a group of peers. Certain disciplines, for example economics, issue working papers in series. [Source: http://www.ukoln.ac.uk/repositories/digirep/index/Eprints_Type_Vocabulary_Encoding_Scheme#:~:text=http%3A//purl.org/eprint/type/WorkingPaper] + + + + + + A resource type that is not included in existing terms. 
[COAR definition] + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_it.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_it.xml new file mode 100644 index 000000000000..1fcd85735707 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_it.xml @@ -0,0 +1,239 @@ + + + + + + Una risorsa costituita principalmente da parole da leggere. Esempi sono libri, lettere, tesi di laurea, poesie, giornali, articoli, archivi di mailing list. Si noti che i facsimili o le immagini di testi appartengono ancora al genere Testo. [Fonte: http://purl.org/dc/dcmitype/Text] + + + + Un'annotazione nel senso di nota legale è un commento giuridicamente esplicativo su una decisione emessa da un tribunale o da un tribunale arbitrale. [Fonte: DRIVER info:eu-repo definition] + + + + Un elenco di libri e articoli che sono stati utilizzati da qualcuno per scrivere un particolare libro o articolo [Fonte: https://dictionary.cambridge.org/dictionary/english/bibliography] + + + + Un pezzo di scrittura o altro contenuto pubblicato su un blog. [Fonte: https://www.lexico.com/definition/blog_post] + + + + Una pubblicazione non seriale completa in un volume o in un numero finito di volumi. [Fonte: Adattato da http://purl.org/eprint/type/Book] + + + + Un capitolo o una sezione definita di un libro, di solito con un titolo o un numero separato. [Fonte: http://purl.org/spar/fabio/BookChapter] + + + + + + Tutti i tipi di risorse digitali fornite a una conferenza, come presentazioni di conferenze (diapositive), relazioni di conferenze, lezioni di conferenze, abstract, dimostrazioni. Per i documenti, i poster o gli atti di una conferenza si devono usare i sottoconcetti specifici. 
[definizione COAR] + + + + Un documento, tipicamente la realizzazione di un lavoro di ricerca che riporta i risultati di una ricerca originale. Utilizzare questa etichetta quando l'articolo non è stato pubblicato in un documento. [Fonte: Adattato da http://purl.org/spar/fabio/ConferencePaper] + + + + Un poster espositivo, tipicamente contenente un testo con figure e/o tabelle illustrative, che di solito riporta i risultati di una ricerca o propone ipotesi, presentato per essere accettato e/o presentato a una conferenza, un seminario, un simposio, un workshop o un evento simile. Utilizzare questa etichetta se il poster non è stato pubblicato in un documento. [Fonte: http://purl.org/spar/fabio/ConferencePoster] + + + + Una serie di diapositive contenenti testo, tabelle o figure, progettate per comunicare idee o risultati di ricerca, per la proiezione e la visione da parte di un pubblico in occasione di una conferenza, un simposio, un seminario, una lezione, un workshop o altri incontri. [Fonte: Adattato da http://purl.org/spar/fabio/Presentation] + + + + Gli atti della conferenza sono il resoconto ufficiale di una riunione di conferenza. Si tratta di una raccolta di documenti che corrispondono alle presentazioni tenute durante la conferenza. Può includere contenuti aggiuntivi. [Fonte: http://www.ieee.org/documents/confprocdefined.pdf ] + + + + Un documento, pubblicato all'interno degli atti di una conferenza, è in genere la realizzazione di un lavoro di ricerca che riporta i risultati di una ricerca originale. [Fonte: Adattato da http://purl.org/spar/fabio/ConferencePaper] + + + + Un poster espositivo, pubblicato all'interno di un documento di conferenza, contenente solitamente un testo con figure e/o tabelle illustrative, che di solito riporta risultati di ricerca o propone ipotesi, presentato per l'accettazione e/o presentato a una conferenza, un seminario, un simposio, un workshop o un evento simile. 
[Fonte: adattato http://purl.org/spar/fabio/ConferencePoster] + + + + + + + + Una rivista è una pubblicazione seriale dedicata alla diffusione di ricerche originali e di sviluppi attuali su un argomento. (Adattato da ODLIS) [Fonte: http://dspacecris.eurocris.org/cris/classcerif/classcerif00422] + + + + Un breve saggio che esprime l'opinione o la posizione del direttore (o dei direttori) di una rivista (accademica) rispetto a una questione politica, sociale, culturale o professionale attuale. [Fonte: Adattato da ODLIS [Fonte: http://www.abc-clio.com/ODLIS/odlis_e.aspx ] + + + + Un articolo, tipicamente la realizzazione di un lavoro di ricerca che riporta i risultati di una ricerca originale, pubblicato in un numero di una rivista. [Fonte: http://purl.org/spar/fabio/JournalArticle] + + + + Una correzione formale di un errore introdotto dall'autore in un documento precedentemente pubblicato. (adattato da https://sparontologies.github.io/fabio/current/fabio.html#d4e2712) + + + + Un documento sui dati è una pubblicazione scientifica che descrive un particolare insieme di dati o un gruppo di dati, pubblicata sotto forma di articolo sottoposto a revisione paritaria in una rivista scientifica. Lo scopo principale di un documento sui dati è descrivere i dati, le circostanze della loro raccolta e le informazioni relative alle caratteristiche dei dati, all'accesso e al potenziale riutilizzo. Adattato da https://en.wikipedia.org/wiki/Data_paper e http://www.gbif.org/publishing-data/data-papers + + + + Un articolo di ricerca è una fonte primaria, cioè riporta i metodi e i risultati di uno studio originale condotto dagli autori. (adattato da http://apus.libanswers.com/faq/2324) + + + + Un articolo di revisione è una fonte secondaria, cioè è scritto su altri articoli e non riporta ricerche originali proprie. 
[Fonte: Adattato da http://apus.libanswers.com/faq/2324] + + + + Un documento sul software dovrebbe includere le motivazioni per lo sviluppo dello strumento e i dettagli del codice utilizzato per la sua costruzione. [Fonte: Adattato da https://f1000research.com/for-authors/article-guidelines/software-tool-articles ] + + + + + + Una lettera indirizzata all'editore che commenta o discute un articolo precedentemente pubblicato da quel periodico, o di interesse per i suoi lettori. [Fonte: Adattato da http://purl.org/spar/fabio/Letter] + + + + + + Trascrizione di una presentazione orale o di un discorso destinato a presentare informazioni o a insegnare alle persone un particolare argomento, ad esempio da parte di un insegnante universitario o di un college. [Fonte: Adattato da https://en.wikipedia.org/wiki/Lecture] + + + + Breve descrizione di una nuova ricerca importante, nota anche come "comunicazione". [Fonte: https://cerif.eurocris.org/vocab/html/OutputTypes.html#Letter] + + + + Un periodico di interesse popolare che di solito contiene articoli su una varietà di argomenti, scritti da vari autori in uno stile non accademico o una pubblicazione commerciale che, a differenza di una pubblicazione di consumo, tratta un argomento specifico per le persone che lavorano in quel particolare campo o settore. [Fonte: Adattato da https://www.thebalance.com/what-is-a-trade-publication-exactly-2316039 e http://www.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Un manoscritto è un'opera di qualsiasi tipo (testo, iscrizione, spartito musicale, mappa, ecc.) scritta interamente a mano. [Fonte: https://products.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Simboli usati per scrivere musica, come in uno spartito, e per esprimere concetti matematici. 
+[Fonte: Adattato da https://products.abc-clio.com/ODLIS/odlis_n.aspx] + + + + Un periodico non recensito, di solito pubblicato quotidianamente o settimanalmente, che consiste principalmente in editoriali e notizie riguardanti eventi attuali o recenti e questioni di interesse pubblico. [Fonte: http://purl.org/spar/fabio/Newspaper] + + + + Lavoro che consiste in una notizia apparsa su un giornale di interesse generale o su un altro periodico di informazione generale, contenente informazioni di interesse attuale e tempestivo in un campo. (Adattato da http://www.reference.md/files/D018/mD018431.html ) + + + + + + Un tipo di risorsa che non è incluso nei termini esistenti sotto il concetto superiore "Testo". [definizione COAR] + + + + Un preprint è un manoscritto scientifico non sottoposto a peer-review e non ancora accettato da una rivista, tipicamente inviato dall'autore a un server/repository pubblico. [Fonte: Adattato da https://asapbio.org/preprint-info/preprint-faq#qaef-637] + + + + Un rapporto è un resoconto pubblicato separatamente dei risultati di una ricerca, di una ricerca ancora in corso, di sviluppi ed eventi politici o di altri risultati tecnici, di solito con un numero di rapporto e talvolta di sovvenzione assegnato dall'agenzia di finanziamento. È anche un documento ufficiale delle attività di un comitato o di un'entità aziendale, dei procedimenti di un ente governativo o di un'indagine di un'agenzia, pubblicato o privato, solitamente archiviato o presentato a un'autorità superiore, volontariamente o su mandato. In senso più generale, qualsiasi resoconto formale di fatti o informazioni relativi a un evento o fenomeno specifico, talvolta fornito a intervalli regolari. [Fonte: http://lu.com/odlis/odlis_R.cfm#report ] + + + + Un lavoro che riporta i risultati di uno studio di ricerca per valutare interventi o esposizioni su esiti biomedici o sanitari. 
I due tipi principali di studi clinici sono gli studi interventistici (trial clinici) e gli studi osservazionali. Sebbene la maggior parte degli studi clinici riguardi gli esseri umani, questo tipo di pubblicazione può essere utilizzato per articoli clinici veterinari che soddisfano i requisiti per gli esseri umani. [Fonte: https://www.ncbi.nlm.nih.gov/mesh/2009830] + + + + Una dichiarazione formale che descrive come i dati di ricerca saranno gestiti e documentati nel corso di un progetto di ricerca e i termini relativi al successivo deposito dei dati presso un archivio di dati per la gestione e la conservazione a lungo termine. [Fonte: https://casrai.org/rdm-glossary] + + + + Una nota formale distribuita internamente a una o più persone in un'azienda, agenzia, organizzazione o istituzione, con un'intestazione che indica la data di invio e che specifica a chi è indirizzata (To:), da chi è inviata (From:) e l'oggetto del testo (Re:). A differenza di una lettera, un memo non richiede un saluto completo o una firma alla fine del testo: il mittente può semplicemente siglare il proprio nome nell'intestazione. [Fonte: https://products.abc-clio.com/ODLIS/odlis_m.aspx#memorandum] + + + + Un rapporto politico presenta ciò che si sa su una particolare questione o problema. Riunisce fatti e prove per aiutare i lettori a comprendere questioni complesse e a formulare una risposta. Può mirare a essere neutrale o a persuadere i lettori in una particolare direzione. [Fonte: https://www.uow.edu.au/student/learning-co-op/assessments/policy-report/#] + + + + Un documento contenente una relazione di progetto, destinato a essere consegnato a un cliente o a un'agenzia di finanziamento, che descrive i risultati ottenuti nell'ambito di un progetto specifico. 
[Fonte: http://purl.org/spar/fabio/ProjectReportDocument] + + + + Il protocollo è un piano dettagliato dello studio di ricerca che include un sommario del progetto, una descrizione del progetto che copre il razionale, gli obiettivi, la metodologia, la gestione e l'analisi dei dati, le considerazioni etiche, le questioni di genere e i riferimenti. [Fonte: Adattato da https://www.who.int/publications/i/item/a-practical-guide-for-health-researchers] + + + + È una pubblicazione che riporta i risultati di un progetto di ricerca o, in alternativa, osservazioni scientifiche su un argomento. [Fonte: Adattato da https://en.wikipedia.org/wiki/Research_report] + + + + Un documento che descrive il processo, i progressi o i risultati di una ricerca tecnica o scientifica o lo stato di un problema di ricerca tecnica o scientifica. Può anche includere raccomandazioni e conclusioni della ricerca. [Fonte: http://guides.library.cornell.edu/ecommons/types] + + + + + + Una proposta di ricerca è un documento che propone un progetto di ricerca, generalmente in ambito scientifico o accademico, e costituisce generalmente una richiesta di sponsorizzazione di tale ricerca. [Fonte: https://en.wikipedia.org/wiki/Research_proposal] + + + + Una rassegna di lavori pubblicati da altri. [Fonte: adattato da http://purl.org/spar/fabio/Review] + + + + Una recensione scritta e un'analisi critica del contenuto, della portata e della qualità di un libro o di un'altra opera monografica. [Fonte: http://purl.org/spar/fabio/BookReview] + + + + Un commento è un'analisi più approfondita scritta per attirare l'attenzione su un'opera già pubblicata. I commenti sono in qualche modo simili alle "recensioni", in quanto l'autore presenta la propria analisi di un'opera e il motivo per cui sarebbe interessante per un pubblico specifico. 
[Fonte: https://www.enago.com/academy/perspective-opinion-and-commentary-pieces] + + + + Una valutazione di un lavoro scientifico, accademico o professionale da parte di altri che lavorano nello stesso campo. [Fonte: Adottato da https://schema.datacite.org/meta/kernel-4.4/doc/DataCite-MetadataKernel_v4.4.pdf] + + + + + + La documentazione tecnica si riferisce a qualsiasi tipo di documentazione che descrive la gestione, la funzionalità e l'architettura di un prodotto tecnico o di un prodotto in fase di sviluppo o di utilizzo. [Fonte: https://en.wikipedia.org/wiki/Technical_documentation] + + + + Un libro scritto da uno studente che contiene una presentazione formale dei risultati della ricerca presentata per l'esame al termine di un corso di studi presso un istituto di istruzione superiore, per soddisfare i requisiti di un titolo accademico. Conosciuto anche come tesi di laurea. [Fonte: http://purl.org/spar/fabio/Thesis] + + + + Una tesi che riporta un progetto di ricerca intrapreso nell'ambito di un corso di studi universitario che porta al conseguimento di un diploma di laurea. [Fonte: http://purl.org/spar/fabio/BachelorsThesis] + + + + Una tesi che riporta la ricerca intrapresa durante un periodo di studi universitari che porta al conseguimento di un dottorato. [Fonte: http://purl.org/spar/fabio/DoctoralThesis] + + + + Una tesi che riporta un progetto di ricerca intrapreso nell'ambito di un corso di laurea che porta al conseguimento di un master. [Fonte: http://purl.org/spar/fabio/MastersThesis] + + + + + + Una registrazione scritta di parole pronunciate in un procedimento giudiziario o in un discorso, un'intervista, una trasmissione o una registrazione sonora. [Fonte: Adattato da https://products.abc-clio.com/ODLIS/odlis_t.aspx] + + + + Un documento di lavoro o di discussione diffuso pubblicamente o tra un gruppo di colleghi. Alcune discipline, ad esempio l'economia, pubblicano documenti di lavoro in serie. 
[Fonte: http://www.ukoln.ac.uk/repositories/digirep/index/Eprints_Type_Vocabulary_Encoding_Scheme#:~:text=http%3A//purl.org/eprint/type/WorkingPaper]. + + + + + + Un tipo di risorsa che non è incluso nei termini esistenti. [Definizione COAR] + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_uk.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_uk.xml new file mode 100644 index 000000000000..9e0bb808e5f7 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_uk.xml @@ -0,0 +1,239 @@ + + + + + + Ресурс, що складається переважно зі слів для читання. Прикладами можуть бути книги, листи, дисертації, вірші, газети, статті, архіви списків розсилки. Зауважте, що факсиміле або зображення текстів все ще належать до жанру Текст. [Джерело: http://purl.org/dc/dcmitype/Text] + + + + Анотація в значенні юридичної замітки - це юридично роз'яснювальний коментар до рішення, винесеного судом або арбітражним судом. [Джерело: DRIVER info:eu-repo визначення]. + + + + Список книг і статей, які були використані кимось при написанні певної книги або статті [Джерело: https://dictionary.cambridge.org/dictionary/english/bibliography]. + + + + Твір або інший елемент контенту, опублікований у блозі. [Джерело: https://www.lexico.com/definition/blog_post] + + + + Несерійне видання, завершене в одному томі або визначеній кінцевій кількості томів. [Джерело: адаптовано з http://purl.org/eprint/type/Book] + + + + Певна глава або розділ книги, зазвичай з окремим заголовком або номером. [Джерело: http://purl.org/spar/fabio/BookChapter] + + + + + + Всі види цифрових ресурсів, що були представлені на конференції, такі як презентація конференції (слайди), доповідь конференції, лекція конференції, тези, демонстрації. Для конференційних доповідей, постерів або матеріалів слід використовувати спеціальні підпоняття. 
[Визначення COAR]. + + + + Документ, як правило, реалізація дослідницької роботи, в якій викладено оригінальні результати дослідження. Використовуйте це позначення, коли стаття не публікується у збірнику наукових праць. [Джерело: адаптовано з http://purl.org/spar/fabio/ConferencePaper] + + + + Плакат для демонстрації, що зазвичай містить текст з ілюстративними рисунками та/або таблицями, зазвичай повідомляє про результати дослідження або пропонує гіпотези, поданий для прийняття та/або представлений на конференції, семінарі, симпозіумі, воркшопі або подібному заході. Використовуйте це позначення, коли постер не публікується у збірнику матеріалів. [Джерело: http://purl.org/spar/fabio/ConferencePoster] + + + + Набір слайдів, що містять текст, таблиці або малюнки, призначені для передачі ідей або результатів досліджень, для проекції та перегляду аудиторією на конференції, симпозіумі, семінарі, лекції, воркшопі або інших заходах. [Джерело: адаптовано з http://purl.org/spar/fabio/Presentation] + + + + Збірник матеріалів конференції - це офіційний звіт про роботу конференції. Це збірник документів, який відповідає презентаціям, представленим на конференції. Він може містити додаткову інформацію. [Джерело: http://www.ieee.org/documents/confprocdefined.pdf ]. + + + + Стаття, опублікована в матеріалах конференції, як правило, є реалізацією наукової роботи, в якій викладено оригінальні результати дослідження. [Джерело: адаптовано з http://purl.org/spar/fabio/ConferencePaper] + + + + Плакат, опублікований у збірнику матеріалів конференції, який зазвичай містить текст з ілюстративними рисунками та/або таблицями, зазвичай повідомляє про результати досліджень або пропонує гіпотези, подається для прийняття та/або представляється на конференції, семінарі, симпозіумі, воркшопі або подібному заході. 
[Джерело: Адаптовано за http://purl.org/spar/fabio/ConferencePoster] + + + + + + + + Журнал - це серійне видання, призначене для поширення оригінальних досліджень і поточних розробок з певної тематики. (Адаптовано з ODLIS) [Джерело: http://dspacecris.eurocris.org/cris/classcerif/classcerif00422] + + + + Коротке есе, що виражає думку або позицію головного редактора (академічного) журналу щодо актуального політичного, соціального, культурного або професійного питання. [Джерело: Адаптовано з ODLIS [Джерело: http://www.abc-clio.com/ODLIS/odlis_e.aspx ]. + + + + Стаття, як правило, реалізація дослідницької роботи, що повідомляє про оригінальні результати дослідження, опублікована у випуску журналу. [Джерело: http://purl.org/spar/fabio/JournalArticle] + + + + Формальне виправлення помилки, внесеної автором у раніше опублікований документ. (адаптовано з https://sparontologies.github.io/fabio/current/fabio.html#d4e2712) + + + + Data paper - це наукова публікація, що описує певний набір або групу наборів даних, опублікована у вигляді рецензованої статті в науковому журналі. Основна мета документу даних - описати дані, обставини їхнього збору, а також інформацію, пов'язану з характеристиками даних, доступом до них і потенційним повторним використанням. Взято з https://en.wikipedia.org/wiki/Data_paper та http://www.gbif.org/publishing-data/data-papers + + + + Дослідницька стаття є першоджерелом, тобто повідомляє про методи і результати оригінального дослідження, виконаного авторами. (адаптовано з http://apus.libanswers.com/faq/2324) + + + + Оглядова стаття є вторинним джерелом, тобто вона написана про інші статті і не повідомляє про власні оригінальні дослідження. [Джерело: Адаптовано з http://apus.libanswers.com/faq/2324] + + + + Стаття про програмне забезпечення повинна містити обґрунтування розробки інструменту та деталі коду, використаного для його побудови. 
[Джерело: Адаптовано з https://f1000research.com/for-authors/article-guidelines/software-tool-articles ] + + + + + + Лист на ім'я редактора, в якому коментується або обговорюється тема, раніше опублікована цим виданням, або така, що становить інтерес для його читацької аудиторії. [Джерело: адаптовано з http://purl.org/spar/fabio/Letter] + + + + + + Транскрипція усного виступу/розмови, що має на меті представити інформацію або навчити людей певному предмету, наприклад, викладача університету або коледжу. [Джерело: Запозичено з https://en.wikipedia.org/wiki/Lecture] + + + + Короткий опис нового важливого дослідження, також відомого як "комунікація". [Джерело: https://cerif.eurocris.org/vocab/html/OutputTypes.html#Letter] + + + + Періодичне видання, яке зазвичай містить статті на різноманітні теми, написані різними авторами в ненауковому стилі, або професійне видання, на відміну від споживчого видання, висвітлює конкретну тему для людей, які працюють у цій галузі чи індустрії. [Джерело: Адаптовано з https://www.thebalance.com/what-is-a-trade-publication-exactly-2316039 та http://www.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Рукопис - це твір будь-якого виду (текст, напис, музична партитура, карта тощо), написаний повністю від руки. [Джерело: https://products.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Символи, що використовуються для написання музики, як у нотному записі, а також для вираження математичних понять. +[Джерело: Адаптовано з https://products.abc-clio.com/ODLIS/odlis_n.aspx] + + + + Періодичне видання, що не рецензується, зазвичай виходить щодня або щотижня, складається переважно з редакційних статей та новин, що стосуються поточних або нещодавніх подій і питань, які становлять суспільний інтерес. [Джерело: http://purl.org/spar/fabio/Newspaper] + + + + Робота, що складається з новини, яка з'являється в газеті або іншому періодичному виданні, що містить актуальну та своєчасну інформацію в тій чи іншій галузі. 
(Запозичено з http://www.reference.md/files/D018/mD018431.html) + + + + + + Тип ресурсу, який не включено до існуючих термінів під головним поняттям "Текст". [визначення COAR]. + + + + Препринт - це науковий рукопис, який ще не пройшов рецензування і не був прийнятий журналом, як правило, розміщений автором на публічному сервері/репозиторії. [Джерело: Адаптовано з https://asapbio.org/preprint-info/preprint-faq#qaef-637] + + + + Звіт - це окремо опублікований запис результатів досліджень, досліджень, які ще тривають, політичних змін і подій або інших технічних результатів, який зазвичай має номер звіту, а іноді й номер гранту, присвоєний установою, що фінансує проект. Крім того, офіційний звіт про діяльність комітету або юридичної особи, засідання державного органу або розслідування, проведене агентством, як опублікований, так і приватний, зазвичай архівується або передається до вищого органу влади, добровільно або за мандатом. У більш загальному сенсі, будь-який офіційний звіт про факти або інформацію, пов'язану з конкретною подією або явищем, який іноді подається через регулярні проміжки часу. [Джерело: http://lu.com/odlis/odlis_R.cfm#report ]. + + + + Робота, яка звітує про результати наукового дослідження з метою оцінки втручань або впливів на біомедичні або пов'язані зі здоров'ям результати. Двома основними типами клінічних досліджень є інтервенційні дослідження (клінічні випробування) та обсерваційні дослідження. Хоча більшість клінічних досліджень стосуються людей, цей тип публікації може бути використаний для клінічних ветеринарних статей, що відповідають вимогам для людей. [Джерело: https://www.ncbi.nlm.nih.gov/mesh/2009830] + + + + Офіційна заява, що описує, як будуть управлятися і документуватися дослідницькі дані протягом дослідницького проекту, а також умови подальшої передачі даних в сховище даних для довгострокового управління і збереження. 
[Джерело: https://casrai.org/rdm-glossary] + + + + Офіційна записка, що розповсюджується всередині компанії, агентства, організації чи установи одній або кільком особам, із заголовком, що вказує на дату її надсилання та зазначає, кому вона адресована (To:), від кого вона надсилається (From:) та тему тексту (Re:). На відміну від листа, службова записка не вимагає повного привітання або підпису в кінці тексту - відправник може просто вказати своє ім'я в заголовку. [Джерело: https://products.abc-clio.com/ODLIS/odlis_m.aspx#memorandum] + + + + В аналітичному звіті представлено те, що відомо про певне питання чи проблему. У ньому зібрані факти і докази, які допомагають читачам зрозуміти складні питання і сформувати відповідь. Він може бути нейтральним, а може мати на меті переконати читачів у певному напрямку. [Джерело: https://www.uow.edu.au/student/learning-co-op/assessments/policy-report/#] + + + + Документ, що містить звіт про проект, призначений для надання замовнику або фінансовій установі, який описує результати, досягнуті в рамках конкретного проекту. [Джерело: http://purl.org/spar/fabio/ProjectReportDocument] + + + + Протокол - це детальний план наукового дослідження, що включає резюме проекту, опис проекту, що охоплює обґрунтування, цілі, методологію, управління даними та їх аналіз, етичні міркування, гендерні питання та посилання. [Джерело: Адаптовано з https://www.who.int/publications/i/item/a-practical-guide-for-health-researchers] + + + + Це публікація, яка повідомляє про результати дослідницького проекту або, як альтернатива, наукові спостереження на певну тему або про неї. [Джерело: адаптовано з https://en.wikipedia.org/wiki/Research_report] + + + + Документ, який описує процес, хід або результати технічного чи наукового дослідження або стан проблеми технічного чи наукового дослідження. Він також може містити рекомендації та висновки дослідження. 
[Джерело: http://guides.library.cornell.edu/ecommons/types] + + + + + + Дослідницька пропозиція - це документ, що пропонує дослідницький проект, як правило, в галузі науки або освіти, і, як правило, являє собою прохання про спонсорську підтримку цього дослідження. [Джерело: https://en.wikipedia.org/wiki/Research_proposal] + + + + Рецензія на чужі опубліковані роботи. [Джерело: адаптовано з http://purl.org/spar/fabio/Review] + + + + Письмовий огляд і критичний аналіз змісту, обсягу та якості книги або іншої монографічної праці. [Джерело: http://purl.org/spar/fabio/BookReview] + + + + Коментар - це більш поглиблений аналіз, написаний з метою привернути увагу до вже опублікованої роботи. Коментарі дещо схожі на "рецензії" в тому сенсі, що автор представляє свій аналіз роботи і пояснює, чому вона може бути цікавою для певної аудиторії. [Джерело: https://www.enago.com/academy/perspective-opinion-and-commentary-pieces] + + + + Оцінка наукової, академічної або професійної роботи іншими особами, які працюють у тій самій галузі. [Джерело: Взято з https://schema.datacite.org/meta/kernel-4.4/doc/DataCite-MetadataKernel_v4.4.pdf] + + + + + + Технічна документація - будь-який тип документації, що описує поводження, функціональність та архітектуру технічного продукту або продукту, що перебуває на стадії розробки чи використання. [Джерело: https://en.wikipedia.org/wiki/Technical_documentation] + + + + Книга, написана студентом, що містить офіційну презентацію результатів дослідження, яка подається на іспит після завершення курсу навчання у вищому навчальному закладі, щоб виконати вимоги для здобуття наукового ступеня. Також відома як дисертація. [Джерело: http://purl.org/spar/fabio/Thesis] + + + + Дипломна робота, що містить звіт про дослідницький проект, виконаний в рамках бакалаврського курсу навчання, що веде до здобуття ступеня бакалавра. 
[Джерело: http://purl.org/spar/fabio/BachelorsThesis] + + + + Дисертація, що містить звіт про дослідження, проведене під час навчання в аспірантурі для здобуття ступеня доктора наук. [Джерело: http://purl.org/spar/fabio/DoctoralThesis] + + + + Дипломна робота, в якій висвітлюється дослідницький проект, виконаний в рамках аспірантури, що веде до отримання ступеня магістра. [Джерело: http://purl.org/spar/fabio/MastersThesis] + + + + + + Письмовий запис слів, виголошених у судовому засіданні або у промові, інтерв'ю, радіо- чи телепередачі чи звукозаписі. [Джерело: адаптовано з https://products.abc-clio.com/ODLIS/odlis_t.aspx] + + + + Робочий або дискусійний документ, який поширюється публічно або серед групи колег. Деякі дисципліни, наприклад, економіка, випускають робочі документи серіями. [Джерело: http://www.ukoln.ac.uk/repositories/digirep/index/Eprints_Type_Vocabulary_Encoding_Scheme#:~:text=http%3A//purl.org/eprint/type/WorkingPaper] + + + + + + Тип ресурсу, який не включено до існуючих термінів. [Визначення COAR]. 
+ + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-doi-json.template b/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-doi-json.template new file mode 100644 index 000000000000..841a6a03fbd3 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-doi-json.template @@ -0,0 +1,4 @@ +{ + "primary-doi": "@virtual.primary-doi.dc-identifier-doi@", + "alternative-doi": "@virtual.alternative-doi.dc-identifier-doi@", +} \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-vocabulary_18n-publication-with-vocabulary-xml.template b/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-vocabulary_18n-publication-with-vocabulary-xml.template new file mode 100644 index 000000000000..a8ca0b6f5b82 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-vocabulary_18n-publication-with-vocabulary-xml.template @@ -0,0 +1,7 @@ + + @dspace.entity.type@ + @dc.title@ + @virtual.vocabulary_i18n.dc-type.publication-coar-types@ + @virtual.vocabulary_i18n.dc-language-iso.common_iso_languages@ + @virtual.vocabulary_i18n.organization-address-addressCountry.common_iso_countries@ + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-vocabulary_18n-publication-xml.template b/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-vocabulary_18n-publication-xml.template new file mode 100644 index 000000000000..099f285a32e2 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/crosswalks/template/virtual-field-vocabulary_18n-publication-xml.template @@ -0,0 +1,6 @@ + + @dspace.entity.type@ + @dc.title@ + @virtual.vocabulary_i18n.dc-type@ + @virtual.vocabulary_i18n.dc-language-iso@ + \ No newline at end of file diff 
--git a/dspace-api/src/test/data/dspaceFolder/config/entities/merge-relationship-types.xml b/dspace-api/src/test/data/dspaceFolder/config/entities/merge-relationship-types.xml new file mode 100644 index 000000000000..8db947319542 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/entities/merge-relationship-types.xml @@ -0,0 +1,126 @@ + + + + + + Project + Project + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Person + Person + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Funding + Funding + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + OrgUnit + OrgUnit + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Journal + Journal + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Publication + Publication + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Product + Product + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Patent + Patent + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Event + Event + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Equipment + Equipment + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index 0b7def31ca3a..d94d1145ee45 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -26,6 +26,7 @@ + @@ -100,7 +101,7 @@ utils submission - + org.dspace.app.rest.submit.step.DescribeStep submission-form @@ -115,7 +116,7 @@ org.dspace.app.rest.submit.step.AccessConditionStep accessCondition - + submit.progressbar.accessCondition org.dspace.app.rest.submit.step.AccessConditionStep @@ -146,7 +147,7 @@ detect-duplicate - + submit.progressbar.ExtractMetadataStep org.dspace.app.rest.submit.step.ExtractMetadataStep extract @@ -214,7 +215,7 @@ org.dspace.app.rest.submit.step.DescribeStep 
submission-form - + submit.progressbar.sherpapolicy org.dspace.app.rest.submit.step.SherpaPolicyStep @@ -231,8 +232,8 @@ org.dspace.app.rest.submit.step.DescribeStep submission-form - - + + submit.progressbar.CustomUrlStep org.dspace.app.rest.submit.step.CustomUrlStep custom-url @@ -284,6 +285,17 @@ submission + + submit.progressbar.describe.green + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.green + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + @@ -329,7 +341,7 @@ - + @@ -337,7 +349,7 @@ - + @@ -414,7 +426,7 @@ - + @@ -436,11 +448,11 @@ - + - + @@ -456,6 +468,17 @@ + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 08eb98710584..2f3a64218dbf 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -43,7 +43,7 @@ dspace.server.url = http://localhost db.driver = org.h2.Driver db.dialect=org.hibernate.dialect.H2Dialect # Use a 10 second database lock timeout to avoid occasional JDBC lock timeout errors -db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=ROW\,VALUE +db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=ROW\,VALUE;TIME ZONE=UTC;DB_CLOSE_ON_EXIT=FALSE db.username = sa db.password = # H2's default schema is PUBLIC @@ -157,11 +157,11 @@ wos.apiKey = submission.lookup.epo.consumerKey= submission.lookup.epo.consumerSecretKey= -event.dispatcher.default.consumers = versioning, discovery, eperson, dedup, crisconsumer, audit, nbeventsdelete, referenceresolver, orcidwebhook, iiif, itemenhancer, customurl, reciprocal +event.dispatcher.default.consumers = versioning, discovery, eperson, dedup, crisconsumer, audit, nbeventsdelete, referenceresolver, orcidwebhook, iiif, itemenhancer, customurl, reciprocal, filetypemetadataenhancer # setup a dispatcher also with the cris consumer 
event.dispatcher.cris-default.class = org.dspace.event.BasicDispatcher -event.dispatcher.cris-default.consumers = versioning, discovery, eperson, dedup, crisconsumer, orcidqueue, audit, referenceresolver, orcidwebhook, itemenhancer, customurl +event.dispatcher.cris-default.consumers = versioning, discovery, eperson, dedup, crisconsumer, orcidqueue, audit, referenceresolver, orcidwebhook, itemenhancer, customurl, filetypemetadataenhancer # Enable a test authority control on dc.language.iso field choices.plugin.dc.language.iso = common_iso_languages @@ -213,4 +213,17 @@ logging.server.include-stacktrace-for-httpcode = 400, 401, 404, 403, 422 # Configuration required for thorough testing of browse links webui.browse.link.1 = author:dc.contributor.* -webui.browse.link.2 = subject:dc.subject.* \ No newline at end of file +webui.browse.link.2 = subject:dc.subject.* + +# Enable researcher profiles and orcid synchronization for tests +researcher-profile.entity-type = Person +orcid.synchronization-enabled = true + +# Configuration settings required for Researcher Profiles +# These settings ensure "dspace.object.owner" field are indexed by Authority Control +choices.plugin.dspace.object.owner = EPersonAuthority +choices.presentation.dspace.object.owner = suggest +authority.controlled.dspace.object.owner = true + +# force the event system to work synchronously during test +system-event.thread.size = 0 \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml index 3aad934931dc..6962418da96a 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml @@ -100,4 +100,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml 
b/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml index 206b801d0842..3056b535032e 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml @@ -19,7 +19,18 @@ + scope="singleton"> + + + + + + + + + + + @@ -240,7 +240,7 @@ it, please enter the types and the actual numbers or codes. -
      + dc @@ -380,7 +380,7 @@ it, please enter the types and the actual numbers or codes.
      -
      + dc @@ -432,7 +432,7 @@ it, please enter the types and the actual numbers or codes.
      -
      + dc @@ -484,7 +484,7 @@ it, please enter the types and the actual numbers or codes.
      -
      + dc @@ -536,7 +536,7 @@ it, please enter the types and the actual numbers or codes.
      -
      + isJournalOfVolume @@ -1038,8 +1038,8 @@ it, please enter the types and the actual numbers or codes.
      - -
      + + dc @@ -1060,7 +1060,7 @@ it, please enter the types and the actual numbers or codes. false The Approval date. -You can leave out the day and/or month if they aren't applicable. + You can leave out the day and/or month if they aren't applicable. @@ -1073,7 +1073,7 @@ You can leave out the day and/or month if they aren't applicable. false The registration date of the patent. -You can leave out the day and/or month if they aren't applicable. + You can leave out the day and/or month if they aren't applicable. @@ -1169,8 +1169,8 @@ You can leave out the day and/or month if they aren't applicable. The date Filled. -
      -
      +
      +
      dc @@ -1206,8 +1206,8 @@ You can leave out the day and/or month if they aren't applicable. Enter the description of the patent. -
      -
      +
      +
      dc @@ -1243,8 +1243,8 @@ You can leave out the day and/or month if they aren't applicable. Result outputs that are referenced by this patent -
      - + +
      @@ -1528,6 +1528,54 @@ You can leave out the day and/or month if they aren't applicable.
      +
      + + + oairecerif + affiliation + role + false + + + onebox + + + + + oairecerif + person + affiliation + false + + + onebox + + You must enter at least the organisation of your affiliation. + + + oairecerif + affiliation + startDate + false + + + date + + + + + oairecerif + affiliation + endDate + false + + + date + + + + +
      @@ -1638,7 +1686,7 @@ You can leave out the day and/or month if they aren't applicable.
      - +
      @@ -1654,8 +1702,8 @@ You can leave out the day and/or month if they aren't applicable.
      - -
      + + dc @@ -1723,8 +1771,8 @@ You can leave out the day and/or month if they aren't applicable.
      - -
      + + dc @@ -1769,14 +1817,14 @@ You can leave out the day and/or month if they aren't applicable. onebox false - srsc - - Select a subject from the vocabulary. + You must select a publication type + Select the type(s) of content of the item. + publication-coar-types -
      + -
      + dc @@ -1812,9 +1860,9 @@ You can leave out the day and/or month if they aren't applicable. -
      + -
      + dc @@ -1862,8 +1910,8 @@ You can leave out the day and/or month if they aren't applicable. You must enter at least the year. -
      -
      +
      +
      dc @@ -1877,8 +1925,8 @@ You can leave out the day and/or month if they aren't applicable. -
      - + +
      @@ -2025,6 +2073,293 @@ You can leave out the day and/or month if they aren't applicable.
      +
      + + + dc + identifier + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + + + + + + + dc + title + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + dc + title + alternative + true + + onebox + If the item has any alternative titles, please enter them here. + + + + + + dc + date + issued + false + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + + You must enter at least the year. + + + + + dc + contributor + author + true + + group + Enter the names of the authors of this item. + + + + + + dc + contributor + group + true + + onebox + The editors of this publication. + + + + + + dc + type + false + + onebox + Select the type(s) of content of the item. + You must select a publication type + publication-coar-types + + +
      +
      + + + dc + contributor + author + false + + onebox + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh + or Smith, J]. + + You must enter at least the author. + + + + + oairecerif + author + affiliation + false + + onebox + Enter the affiliation of the author as stated on the publication. + + + +
      +
      + + + dc + contributor + editor + false + + onebox + The editors of this publication. + You must enter at least the author. + + + + + oairecerif + editor + affiliation + false + + onebox + Enter the affiliation of the editor as stated on the publication. + + + +
      + +
      + + + dc + identifier + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + + + + + + + dc + title + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + dc + title + alternative + true + + onebox + If the item has any alternative titles, please enter them here. + + + + + + dc + date + issued + false + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + + You must enter at least the year. + + + + + dc + contributor + author + true + + group + Enter the names of the authors of this item. + + + + + + dc + contributor + group + true + + onebox + The editors of this publication. + + + + + + dc + type + false + + onebox + Select the type(s) of content of the item. + You must select a publication type + publication-coar-types + + +
      +
      + + + dc + contributor + author + false + + onebox + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, + J]. + + You must enter at least the author. + + + + + oairecerif + author + affiliation + false + + onebox + Enter the affiliation of the author as stated on the publication. + + + +
      +
      + + + dc + contributor + editor + false + + onebox + The editors of this publication. + You must enter at least the author. + + + + + oairecerif + editor + affiliation + false + + onebox + Enter the affiliation of the editor as stated on the publication. + + + +
      diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml index 20a1a7157b4f..4ad3759e19aa 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml @@ -94,14 +94,14 @@
      - dc - type - - onebox - false - Devi selezionare un tipo di pubblicazione - Seleziona il tipo di contenuto della pubblicazione. - srsc_it + dc + type + + onebox + false + You must select a publication type + Select the type(s) of content of the item. + publication-coar-types_it diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml index b6eebe4ef3b7..244c26204649 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml @@ -99,7 +99,7 @@ false Ви повинні вибрати тип публікації Виберіть тип вмісту публікації. - srsc_uk + publication-coar-types_uk diff --git a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java index a41e985deb32..1134990e84f4 100644 --- a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java +++ b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java @@ -273,6 +273,8 @@ public void testWithEmbargo() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold); assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold); + assertThat("testWithEmbargo 1", embargoDate, equalTo(policy.getStartDate().toString())); } /** @@ -390,6 +392,8 @@ public void testWithPrimaryAndMultipleBitstreams() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + String embargoDate = 
helper.getEmbargoFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); + assertThat("testWithPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(policy.getStartDate().toString())); } /** @@ -419,5 +423,7 @@ public void testWithNoPrimaryAndMultipleBitstreams() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold); assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold); + assertThat("testWithNoPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(null)); } } diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java new file mode 100644 index 000000000000..60a0d629debc --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -0,0 +1,1860 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; +import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; +import static org.dspace.core.Constants.DEFAULT_BUNDLE_NAME; +import static org.dspace.core.Constants.READ; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static 
org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.apache.commons.io.file.PathUtils; +import org.apache.commons.lang3.ArrayUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.mediafilter.FormatFilter; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.SelfNamedPlugin; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.eperson.Group; +import 
org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Basic integration testing for the Bulk Access conditions Feature{@link BulkAccessControl}. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessControlIT extends AbstractIntegrationTestWithDatabase { + + //key (in dspace.cfg) which lists all enabled filters by name + private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; + + //prefix (in dspace.cfg) for all filter properties + private static final String FILTER_PREFIX = "filter"; + + //suffix (in dspace.cfg) for input formats supported by each filter + private static final String INPUT_FORMATS_SUFFIX = "inputFormats"; + + private Path tempDir; + private String tempFilePath; + + private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + private SearchService searchService = SearchUtils.getSearchService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + @Before + @Override + public void setUp() throws Exception { + + super.setUp(); + + tempDir = Files.createTempDirectory("bulkAccessTest"); + tempFilePath = tempDir + "/bulk-access.json"; + } + + @After + @Override + public void destroy() throws Exception { + PathUtils.deleteDirectory(tempDir); + super.destroy(); + } + + @Test + public void performBulkAccessWithAnonymousEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, 
collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("An eperson to do the the Bulk Access Control must be specified") + )); + } + + @Test + public void performBulkAccessWithNotExistingEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + String randomUUID = UUID.randomUUID().toString(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", randomUUID}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), 
hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("EPerson cannot be found: " + randomUUID) + )); + } + + @Test + public void performBulkAccessWithNotAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithCommunityAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + ItemBuilder.createItem(context, collection).build(); + + 
context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", community.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithCollectionAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", collection.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithItemAdminEPersonTest() throws Exception { 
+ context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithNotCollectionAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + // add eperson to admin group + Collection collectionOne = CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + ItemBuilder.createItem(context, collectionOne).build(); + ItemBuilder.createItem(context, collectionTwo).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + 
String[] args = new String[] {"bulk-access-control", + "-u", collectionOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithNotCommunityAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // add eperson to admin group + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + Community communityTwo = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-u", communityTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is 
not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithNotItemAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + // add eperson to admin group + Item itemOne = ItemBuilder.createItem(context, collection) + .withAdminUser(eperson) + .build(); + + Item itemTwo = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", itemOne.getID().toString(), + "-u", itemTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithoutRequiredParamTest() throws Exception { + + buildJsonFile(""); + + String[] args = new String[] {"bulk-access-control", "-f", tempFilePath, "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + 
assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("A target uuid must be provided with at least on uuid") + )); + } + + @Test + public void performBulkAccessWithEmptyJsonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).withTitle("title").build(); + + context.restoreAuthSystemState(); + + buildJsonFile(""); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Error parsing json file") + )); + } + + @Test + public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"wrong\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": 
\"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong value for item mode") + )); + } + + @Test + public void performBulkAccessWithMissingModeOfItemValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("item mode node must be provided") + )); + } + + @Test + public void 
performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"bitstream\": {\n" + + " \"mode\": \"wrong\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong value for bitstream mode") + )); + } + + @Test + public void performBulkAccessWithMissingModeOfBitstreamValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"bitstream\": {\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", 
item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("bitstream mode node must be provided") + )); + } + + @Test + public void performBulkAccessWithNotFoundAccessConditionNameTest() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"wrongAccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong access condition ") + )); + } + + @Test + public void performBulkAccessWithInvalidEmbargoAccessConditionDateTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community 
community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"endDate\": \"2024-06-24T00:00:00Z\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(3)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("invalid access condition, The access condition embargo requires a start date.") + )); + } + + @Test + public void performBulkAccessWithInvalidLeaseAccessConditionDateTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"startDate\": \"2024-06-24T00:00:00Z\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] 
{"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(3)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("invalid access condition, The access condition lease requires an end date.") + )); + } + + @Test + public void performBulkAccessForCommunityItemsWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + UUID.randomUUID() + "\"]\n" + + " },\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("constraint is not supported when uuid isn't an Item") + )); + } + + @Test + public void 
performBulkAccessForMultipleItemsWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + Community communityTwo = CommunityBuilder.createCommunity(context) + .withName("community two") + .build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + UUID.randomUUID() + "\"]\n" + + " },\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-u", communityTwo.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("constraint isn't supported when multiple uuids are provided") + )); + } + + @Test + public void performBulkAccessForSingleItemWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String 
bitstreamOneContent = "Dummy content one"; + Bitstream bitstreamOne; + try (InputStream is = IOUtils.toInputStream(bitstreamOneContent, CharEncoding.UTF_8)) { + bitstreamOne = BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream one") + .build(); + } + + String bitstreamTwoContent = "Dummy content of bitstream two"; + Bitstream bitstreamTwo; + try (InputStream is = IOUtils.toInputStream(bitstreamTwoContent, CharEncoding.UTF_8)) { + bitstreamTwo = BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream two") + .build(); + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + bitstreamOne.getID().toString() + "\"]\n" + + " },\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", item.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(1)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Bitstream {" + bitstreamOne.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"))); + + bitstreamOne = context.reloadEntity(bitstreamOne); + bitstreamTwo = context.reloadEntity(bitstreamTwo); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(bitstreamOne.getResourcePolicies(), hasSize(1)); + 
assertThat(bitstreamOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(bitstreamTwo.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + } + + @Test + public void performBulkAccessWithAddModeAndEmptyAccessConditionsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\"\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", "-u", parentCommunity.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("accessConditions of item must be provided with mode") + )); + } + + @Test + public void performBulkAccessWithValidJsonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Community subCommunityTwo = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community two") + .build(); + + Community subCommunityThree = 
CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community three") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, subCommunityTwo) + .withName("collection two") + .build(); + + Collection collectionThree = CollectionBuilder.createCollection(context, subCommunityThree) + .withName("collection three") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collectionOne).build(); + + Item itemTwo = ItemBuilder.createItem(context, collectionTwo).build(); + + Item itemThree = ItemBuilder.createItem(context, collectionThree).withTitle("item three title").build(); + + Item itemFour = ItemBuilder.createItem(context, collectionThree).withTitle("item four title").build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-u", itemThree.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(3)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Item {" + itemOne.getID() + + "} policy to access 
conditions:{embargo, start_date=2024-06-24}"), + containsString("Replacing Item {" + itemTwo.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"), + containsString("Replacing Item {" + itemThree.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}") + )); + + itemOne = context.reloadEntity(itemOne); + itemTwo = context.reloadEntity(itemTwo); + itemThree = context.reloadEntity(itemThree); + itemFour = context.reloadEntity(itemFour); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(itemOne.getResourcePolicies(), hasSize(1)); + assertThat(itemOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(itemTwo.getResourcePolicies(), hasSize(1)); + assertThat(itemTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(itemThree.getResourcePolicies(), hasSize(1)); + assertThat(itemThree.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(itemFour.getResourcePolicies().size(), is(1)); + assertThat(itemFour.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + + + + } + + @Test + public void performBulkAccessWithReplaceModeAndEmptyAccessConditionsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Community subCommunityTwo = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub 
community two") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, subCommunityTwo) + .withName("collection two") + .build(); + + for (int i = 0; i < 20 ; i++) { + ItemBuilder.createItem(context, collectionOne).build(); + } + + for (int i = 0; i < 5 ; i++) { + Item item = ItemBuilder.createItem(context, collectionTwo).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\"\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\"\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(60)); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + List itemsOfSubCommTwo = findItems("location.comm:" + subCommunityTwo.getID()); + + assertThat(itemsOfSubCommOne, hasSize(10)); + assertThat(itemsOfSubCommTwo, hasSize(5)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> 
findAllBitstreams(item).stream()) + .count(), is(0L)); + + assertThat(itemsOfSubCommTwo.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(5L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(1)); + assertThat(item.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + } + + for (Item item : itemsOfSubCommTwo) { + assertThat(item.getResourcePolicies(), hasSize(1)); + assertThat(item.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItems( + containsString("Cleaning Item {" + item.getID() + "} policies"), + containsString("Inheriting policies from owning Collection in Item {" + item.getID() + "") + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(1)); + assertThat(bitstream.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItems( + containsString("Cleaning Bitstream {" + bitstream.getID() + "} policies"), + containsString("Inheriting policies from owning Collection in Bitstream {" + bitstream.getID() + "") + )); + } + } + } + + @Test + public void performBulkAccessWithAddModeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + 
.build(); + + for (int i = 0; i < 5 ; i++) { + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(10)); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + + assertThat(itemsOfSubCommOne, hasSize(5)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(5L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(3)); + 
assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(3)); + assertThat(bitstream.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + } + } + } + + @Test + public void performBulkAccessWithReplaceModeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + for (int i = 0; i < 3 ; i++) { + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " 
\"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(6)); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + + assertThat(itemsOfSubCommOne, hasSize(3)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(3L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Item {" + item.getID() + + "} policy to access conditions:{openaccess, embargo, start_date=2024-06-24}") + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(2)); + assertThat(bitstream.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, 
"openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Bitstream {" + bitstream.getID() + + "} policy to access conditions:{openaccess, lease, end_date=2023-06-24}") + )); + } + } + } + + @Test + public void performBulkAccessAndCheckDerivativeBitstreamsPoliciesTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bitstream") + .withFormat("TEXT") + .withMimeType("text/plain") + .build(); + } + + List formatFilters = new ArrayList<>(); + Map> filterFormats = new HashMap<>(); + MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); + + String[] filterNames = + DSpaceServicesFactory.getInstance() + .getConfigurationService() + .getArrayProperty(MEDIA_FILTER_PLUGINS_KEY); + + + for (int i = 0; i < filterNames.length; i++) { + + //get filter of this name & add to list of filters + FormatFilter filter = + (FormatFilter) CoreServiceFactory.getInstance() + .getPluginService() + .getNamedPlugin(FormatFilter.class, 
filterNames[i]); + formatFilters.add(filter); + + String filterClassName = filter.getClass().getName(); + + String pluginName = null; + + if (SelfNamedPlugin.class.isAssignableFrom(filter.getClass())) { + //Get the plugin instance name for this class + pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName(); + } + + String[] formats = + DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty( + FILTER_PREFIX + "." + filterClassName + + (pluginName != null ? "." + pluginName : "") + + "." + INPUT_FORMATS_SUFFIX); + + //add to internal map of filters to supported formats + if (ArrayUtils.isNotEmpty(formats)) { + filterFormats.put(filterClassName + + (pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + + pluginName : ""), + Arrays.asList(formats)); + } + } + + mediaFilterService.setFilterClasses(formatFilters); + mediaFilterService.setFilterFormats(filterFormats); + + // here will create derivative bitstreams + mediaFilterService.applyFiltersItem(context, item); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + 
assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + item = context.reloadEntity(item); + + Bundle originalBundle = item.getBundles(DEFAULT_BUNDLE_NAME).get(0); + Bundle textBundle = item.getBundles("TEXT").get(0); + + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(originalBundle.getBitstreams().get(0).getResourcePolicies(), hasSize(2)); + assertThat(originalBundle.getBitstreams().get(0).getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + + assertThat(textBundle.getBitstreams().get(0).getResourcePolicies(), hasSize(2)); + assertThat(textBundle.getBitstreams().get(0).getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + } + + @Test + public void performBulkAccessWithReplaceModeAndAppendModeIsEnabledTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context).withName("special network").build(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection one") + .withDefaultItemRead(group) + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": 
\"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", item.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + try { + configurationService.setProperty("core.authorization.installitem.inheritance-read.append-mode", true); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), + testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Item {" + item.getID() + "} policy to access conditions:" + + "{embargo, start_date=2024-06-24}"), + containsString("Inheriting policies from owning Collection in Item {" + item.getID() + "}") + )); + + item = context.reloadEntity(item); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null), + matches(Constants.READ, group, TYPE_INHERITED) + )); + } finally { + configurationService.setProperty("core.authorization.installitem.inheritance-read.append-mode", false); + } + } + + @Test + public void performBulkAccessWithReplaceModeOnItemsWithMultipleBundlesTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group adminGroup = groupService.findByName(context, Group.ADMIN); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + 
.withName("parent community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection one") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collection).build(); + Item itemTwo = ItemBuilder.createItem(context, collection).build(); + ItemBuilder.createItem(context, collection).build(); + + Bundle bundleOne = BundleBuilder.createBundle(context, itemOne) + .withName("ORIGINAL") + .build(); + + Bundle bundleTwo = BundleBuilder.createBundle(context, itemTwo) + .withName("ORIGINAL") + .build(); + + BundleBuilder.createBundle(context, itemTwo) + .withName("ORIGINAL") + .build(); + + BundleBuilder.createBundle(context, itemOne) + .withName("TEXT") + .build(); + + Bitstream bitstreamOne; + Bitstream bitstreamTwo; + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstreamOne = + BitstreamBuilder.createBitstream(context, bundleOne, is) + .withName("bistream of bundle one") + .build(); + } + + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstreamTwo = + BitstreamBuilder.createBitstream(context, bundleTwo, is) + .withName("bitstream of bundle two") + .build(); + } + + context.restoreAuthSystemState(); + + String jsonOne = "{\n" + + " \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": []\n" + + " },\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"administrator\",\n" + + " \"startDate\": null,\n" + + " \"endDate\": null\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunity.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler 
testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Bitstream {" + bitstreamOne.getID() + + "} policy to access conditions:{administrator}"), + containsString("Replacing Bitstream {" + bitstreamTwo.getID() + + "} policy to access conditions:{administrator}") + )); + + bitstreamOne = context.reloadEntity(bitstreamOne); + bitstreamTwo = context.reloadEntity(bitstreamTwo); + + assertThat(bitstreamOne.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamOne.getResourcePolicies(), hasItem( + matches(READ, adminGroup, "administrator", TYPE_CUSTOM) + )); + + assertThat(bitstreamTwo.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamTwo.getResourcePolicies(), hasItem( + matches(READ, adminGroup, "administrator", TYPE_CUSTOM) + )); + } + + @Test + public void performBulkAccessWithHelpParamTest() throws Exception { + + String[] args = new String[] {"bulk-access-control", "-h"}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + private List findItems(String query) throws SearchServiceException { + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setQuery(query); + + return searchService.search(context, discoverQuery) + .getIndexableObjects() + 
.stream() + .map(indexableObject -> + ((IndexableItem) indexableObject).getIndexedObject()) + .collect(Collectors.toList()); + } + + private List findAllBitstreams(Item item) { + return item.getBundles(CONTENT_BUNDLE_NAME) + .stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .collect(Collectors.toList()); + } + + private void buildJsonFile(String json) throws IOException { + File file = new File(tempDir + "/bulk-access.json"); + Path path = Paths.get(file.getAbsolutePath()); + Files.writeString(path, json, StandardCharsets.UTF_8); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java index e03c414a034c..e1d04f314c68 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java @@ -133,7 +133,8 @@ public void beforeTests() throws SQLException, AuthorizeException { public void testEmptyImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("empty.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -149,7 +150,8 @@ public void testEmptyImport() throws InstantiationException, IllegalAccessExcept public void testEmptyHeadersImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("empty-headers.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", 
eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -165,7 +167,8 @@ public void testEmptyHeadersImport() throws InstantiationException, IllegalAcces public void testOneHeaderEmptyImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("one-header-empty.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -182,7 +185,8 @@ public void testOneHeaderEmptyImport() throws InstantiationException, IllegalAcc public void testWithoutHeadersImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("without-headers.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -198,7 +202,8 @@ public void testWithoutHeadersImport() throws InstantiationException, IllegalAcc public void testInvalidHeadersImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("invalid-headers.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; 
TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -216,7 +221,8 @@ public void testInvalidHeadersImport() throws InstantiationException, IllegalAcc public void testInvalidSheetNameImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("invalid-sheet-name.xlsx"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -241,7 +247,8 @@ public void testMetadataGroupRowWithManyValuesImport() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("metadata-group-row-with-many-values.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -275,7 +282,8 @@ public void testHeadersDuplicatedImport() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("headers-duplicated.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, 
eperson); @@ -300,7 +308,8 @@ public void testCreatePatent() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -351,7 +360,8 @@ public void testUpdatePatent() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -388,7 +398,8 @@ public void testCreatePublicationWithAuthority() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-authority.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -439,7 +450,8 @@ public void testManyPublicationImport() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("many-publications.xls"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", 
fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -515,7 +527,8 @@ public void testManyPublicationImportWithErrorAndNotAbortOnError() throws Except context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("many-publications.xls"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -575,7 +588,8 @@ public void testManyPublicationImportWithErrorAndAbortOnError() throws Exception context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("many-publications.xls"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -630,7 +644,8 @@ public void testCreatePublicationWithOneInvalidAuthorityAndNoAbortOnError() thro context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-one-invalid-authority.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; 
TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -675,7 +690,8 @@ public void testCreatePublicationWithOneInvalidAuthorityAndAbortOnError() throws context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-one-invalid-authority.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -714,7 +730,8 @@ public void testCreatePublicationWithWillBeGeneratedAuthority() throws Exception String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-generated-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -760,7 +777,8 @@ public void testCreatePublicationWithWillBeGeneratedAuthorityAndNoRelatedItemFou String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-generated-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler 
= new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -810,7 +828,8 @@ public void testCreatePublicationWithWillBeReferencedAuthority() throws Exceptio String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-referenced-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -856,7 +875,8 @@ public void testCreatePublicationWithWillBeReferencedAuthorityAndNoRelatedItemFo String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-referenced-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -898,7 +918,8 @@ public void testCreatePublicationInWorkspace() throws Exception { String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-workspace-publication.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); 
handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -942,7 +963,8 @@ public void testCreateArchivedPublication() throws Exception { String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-archived-publication.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -995,7 +1017,8 @@ public void testUpdateWorkflowPatentWithValidWorkspaceItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-workflow-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1045,7 +1068,8 @@ public void testUpdateWorkflowPatentWithInvalidWorkspaceItem() throws Exception context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-workflow-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1097,7 +1121,8 @@ public void testUpdateWorkflowPatentWithoutWorkspaceItem() throws Exception { 
context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-workflow-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1146,7 +1171,8 @@ public void testUpdateArchivePatentWithWorkspaceItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-archive-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1195,7 +1221,8 @@ public void testUpdateArchivePatentWithWorkflowItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-archive-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1244,7 +1271,8 @@ public void testUpdateArchivePatentWithAlreadyArchivedItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-archive-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new 
String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1295,7 +1323,8 @@ public void testAutomaticReferenceResolution() throws Exception { String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-referenced-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e" , eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -1316,7 +1345,8 @@ public void testAutomaticReferenceResolution() throws Exception { String personsCollectionId = persons.getID().toString(); fileLocation = getXlsFilePath("create-person.xls"); - args = new String[] { "bulk-import", "-c", personsCollectionId, "-f", fileLocation, "-e" }; + args = new String[] { "bulk-import", "-c", personsCollectionId, "-f", fileLocation, + "-e" , eperson.getEmail(), "-er"}; handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -1350,7 +1380,8 @@ public void testUploadSingleBitstream() throws Exception { String fileLocation = getXlsFilePath("add-bitstream-to-item.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, 
eperson); @@ -1404,7 +1435,8 @@ public void testUploadMultipleBitstreams() throws Exception { String fileLocation = getXlsFilePath("add-multiple-bitstreams-to-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1458,7 +1490,8 @@ public void testUploadMultipleBitstreamWithPathTraversal() throws Exception { String fileLocation = getXlsFilePath("add-multiple-bitstreams-with-path-traversal-to-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1509,7 +1542,8 @@ public void testUploadSingleBitstreamUpdate() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("add-bitstream-to-item-update.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1555,7 +1589,8 @@ public void testUploadMultipleBitstreamsUpdateMultiple() throws Exception { String fileName = "add-bitstream-to-multiple-items-update.xls"; String fileLocation = getXlsFilePath(fileName); - String[] args = new 
String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1602,7 +1637,8 @@ public void testUploadSingleBitstreamUpdateWithExistingBundle() throws Exception String fileName = "add-bitstream-to-item-bundle.xls"; String fileLocation = getXlsFilePath(fileName); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1639,7 +1675,8 @@ public void testCreatePublicationInWorkspaceItemsAndItemHasLicense() throws Exce context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("items-with-bitstreams.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1702,7 +1739,8 @@ public void testCreatePublicationInWorkspaceItemsWithBitstreams() throws Excepti String fileName = "items-with-bitstreams.xlsx"; String fileLocation = getXlsFilePath(fileName); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", 
fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1786,7 +1824,8 @@ public void testUpdateAndDeleteBitstreamsOfItems() throws Exception { String fileName = "update-delete-bitstreams-of-items.xls"; String fileLocation = getXlsFilePath(fileName); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1855,7 +1894,8 @@ public void testBitstreamUpdateAndDeleteWithWrongPosition() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-delete-bitstreams-of-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1918,7 +1958,8 @@ public void testBitstreamUpdateWithAdditionalConditionSetToFalse() throws Except context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-bitstream-policies-without-additional-ac.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, 
ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1981,7 +2022,8 @@ public void testUpdateItems() throws Exception { // start test String fileLocation = getXlsFilePath("update-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); assertThat(handler.getErrorMessages(), empty()); @@ -2031,7 +2073,8 @@ public void testCreatePublicationWithSecurityLevel() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-security-level.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2081,7 +2124,8 @@ public void testUpdatePublicationWithSecurityLevel() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-publication-with-security-level.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2121,7 +2165,8 @@ public void testWorkbookWithoutActionColumn() throws Exception { 
context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("without-action-column.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2182,7 +2227,8 @@ public void testWorkbookWithDiscoverableColumn() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("publications_with_discoverable_column.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2237,7 +2283,8 @@ public void testWorkbookWithInvalidOptionalColumnPosition() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("invalid-optional-column-position.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2246,6 +2293,28 @@ public void testWorkbookWithInvalidOptionalColumnPosition() throws Exception { + "must be placed before the metadata fields")); } + @Test + public void testCreatePatentByNotCollectionAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + 
Collection patents = createCollection(context, community) + .withSubmissionDefinition("patent") + .withAdminGroup(admin) + .build(); + context.commit(); + context.restoreAuthSystemState(); + + String fileLocation = getXlsFilePath("create-patent.xls"); + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); + + List errorMessages = handler.getErrorMessages(); + assertThat("Expected 1 error message", errorMessages, hasSize(1)); + assertThat(errorMessages.get(0), containsString("The user is not an admin of the given collection")); + } + private WorkspaceItem findWorkspaceItem(Item item) throws SQLException { return workspaceItemService.findByItem(context, item); } diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java index f767ba1663ae..0b7fd8026803 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java @@ -99,8 +99,9 @@ public void metadataExportWithoutFileParameter() script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } @@ -206,8 +207,9 @@ public void metadataExportToCsvTest_NonValidIdentifier() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if 
(DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException(); @@ -235,8 +237,9 @@ public void metadataExportToCsvTest_NonValidDSOType() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException(); diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java index c7fed1be6d3d..79cf6f56d56d 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java @@ -156,8 +156,9 @@ public void metadataImportWithoutEPersonParameterTest() script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } diff --git a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java index eed2826ea67a..bef8ca45c09c 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java @@ -128,6 +128,14 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { String authorId = 
author.getID().toString(); + Item testUser = ItemBuilder.createItem(context, persons) + .withTitle("Test User") + .build(); + + Item jesse = ItemBuilder.createItem(context, persons) + .withTitle("Jesse Pinkman") + .build(); + context.restoreAuthSystemState(); List metadata = new ArrayList<>(); @@ -153,9 +161,11 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { metadata.add(new MetadataValueDTO("dc", "date", "issued", "2022/02/15")); metadata.add(new MetadataValueDTO("dc", "type", null, "Book")); metadata.add(new MetadataValueDTO("dc", "language", "iso", "it")); - metadata.add(new MetadataValueDTO("dc", "contributor", "author", "Jesse Pinkman")); + metadata.add(new MetadataValueDTO("dc", "contributor", "author", null, "Jesse Pinkman", + jesse.getID().toString(), 600)); metadata.add(new MetadataValueDTO("oairecerif", "author", "affiliation", PLACEHOLDER_PARENT_METADATA_VALUE)); - metadata.add(new MetadataValueDTO("dc", "contributor", "author", "Test User")); + metadata.add(new MetadataValueDTO("dc", "contributor", "author", null, "Test User", + testUser.getID().toString(), 600)); metadata.add(new MetadataValueDTO("oairecerif", "author", "affiliation", "Company")); bitstreams = new ArrayList(); @@ -176,7 +186,8 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { String tempLocation = storeInTempLocation(workbook); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", tempLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", tempLocation, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -198,7 +209,7 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { Item firstItem = getItemFromMessage(handler.getInfoMessages().get(7)); assertThat(firstItem, notNullValue()); - 
assertThat(firstItem.getMetadata(), hasSize(14)); + assertThat(firstItem.getMetadata(), hasSize(18)); assertThat(firstItem.getMetadata(), hasItems( with("dc.title", "Test Publication"), with("dc.date.issued", "2020/02/15"), @@ -208,7 +219,9 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { with("dc.subject", "Java", 1), with("dc.subject", "DSpace", 2), with("dc.contributor.author", "White, Walter", authorId, 600), - with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE))); + with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); assertThat(getItemBitstreamsByBundle(firstItem, "ORIGINAL"), contains( bitstreamWith("Bitstream 1", "First bitstream content"), @@ -216,17 +229,22 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { Item secondItem = getItemFromMessage(handler.getInfoMessages().get(10)); assertThat(secondItem, notNullValue()); - assertThat(secondItem.getMetadata(), hasSize(14)); + assertThat(secondItem.getMetadata(), hasSize(22)); assertThat(secondItem.getMetadata(), hasItems( with("dc.title", "Second Publication"), with("dc.date.issued", "2022/02/15"), with("dspace.entity.type", "Publication"), with("dc.type", "Book"), with("dc.language.iso", "it"), - with("dc.contributor.author", "Jesse Pinkman"), - with("dc.contributor.author", "Test User", 1), + with("dc.contributor.author", "Jesse Pinkman", jesse.getID().toString(), 600), + with("dc.contributor.author", "Test User", testUser.getID().toString(), 1, 600), with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE), - with("oairecerif.author.affiliation", "Company", 1))); + with("oairecerif.author.affiliation", "Company", 1), + with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE), + 
with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE) + )); assertThat(getItemBitstreamsByBundle(secondItem, "ORIGINAL"), contains( bitstreamWith("Bitstream 3", "Third bitstream content"))); diff --git a/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java index f4f048ff4bdf..807a2e711e17 100644 --- a/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java +++ b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java @@ -714,8 +714,10 @@ public int performImportScript(String[] csv, boolean validateOnly) throws Except script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue + .equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } if (testDSpaceRunnableHandler.getException() != null) { throw testDSpaceRunnableHandler.getException(); diff --git a/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java b/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java new file mode 100644 index 000000000000..9bdc3f752065 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java @@ -0,0 +1,497 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.filetype.consumer; + +import static org.dspace.app.matcher.MetadataValueMatcher.with; +import static org.hamcrest.MatcherAssert.assertThat; +import static 
org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.sql.SQLException; +import java.text.ParseException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Predicate; + +import org.apache.commons.codec.binary.StringUtils; +import org.apache.curator.shaded.com.google.common.base.Objects; +import org.apache.tools.ant.filters.StringInputStream; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.ResourcePolicyBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.event.factory.EventServiceFactory; +import org.dspace.event.service.EventService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class FileTypeMetadataEnhancerConsumerIT extends AbstractIntegrationTestWithDatabase { + + private static final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + + private static final EventService eventService = 
EventServiceFactory.getInstance().getEventService(); + + private Collection collection; + + private final BitstreamService bitstreamService = ContentServiceFactory.getInstance() + .getBitstreamService(); + private final ItemService itemService = ContentServiceFactory.getInstance() + .getItemService(); + + private static String[] consumers; + + /** + * This method will be run before the first test as per @BeforeClass. It will + * configure the event.dispatcher.default.consumers property to remove the + * FileTypeMetadataEnhancerConsumer. + */ + @BeforeClass + public static void initConsumers() { + consumers = configurationService.getArrayProperty("event.dispatcher.default.consumers"); + Set consumersSet = new HashSet(Arrays.asList(consumers)); + if (!consumersSet.contains("filetypemetadataenhancer")) { + consumersSet.add("filetypemetadataenhancer"); + configurationService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); + eventService.reloadConfiguration(); + } + } + + /** + * Reset the event.dispatcher.default.consumers property value. 
+ */ + @AfterClass + public static void resetDefaultConsumers() { + configurationService.setProperty("event.dispatcher.default.consumers", consumers); + eventService.reloadConfiguration(); + } + + @Before + public void setup() { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context).withName("Parent Community").build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testWithoutBitstreams() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection).build(); + context.restoreAuthSystemState(); + context.commit(); + + item = context.reloadEntity(item); + + assertThat(item.getMetadata(), not(hasItem(with("dc.type", null)))); + assertThat(item.getMetadata(), not(hasItem(with("dspace.file_type", null)))); + + context.turnOffAuthorisationSystem(); + this.itemService.update(context, item); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + assertThat(item.getMetadata(), not(hasItem(with("dc.type", null)))); + assertThat(item.getMetadata(), not(hasItem(with("dspace.file.type", null)))); + } + + @Test + public void testWithoutEntityType() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection).build(); + Bitstream bitstream = BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + item = context.reloadEntity(item); + + assertThat(bitstream.getMetadata(), not(hasItem(with("dc.type", null)))); + assertThat(item.getMetadata(), 
not(hasItem(with("dspace.file.type", null)))); + } + + @Test + public void testWithEntityTypeDelete() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection).build(); + Bitstream bitstream = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .build(); + + ResourcePolicyBuilder + .createResourcePolicy(context) + .withDspaceObject(bitstream) + .withAction(Constants.READ) + .withUser(admin) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + context.turnOffAuthorisationSystem(); + + this.bitstreamService.delete(context, bitstream); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + item = context.reloadEntity(item); + + assertThat(bitstream.getMetadata(), not(hasItem(with("dc.type", null)))); + assertThat(item.getMetadata(), not(hasItem(with("dspace.file.type", null)))); + } + + @Test + public void testWithEntityType() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + final String type = "Publication"; + context.turnOffAuthorisationSystem(); + final Item item = + ItemBuilder + .createItem(context, collection) + .build(); + Bitstream bitstream = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + + assertThat(bitstream.getMetadata(), hasItem(with("dc.type", type))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type))); + } + + @Test + public void testWithTypeEdited() + throws FileNotFoundException, SQLException, AuthorizeException, 
IOException, ParseException { + String type = "Publication"; + context.turnOffAuthorisationSystem(); + Item item = + ItemBuilder + .createItem(context, collection) + .build(); + Bitstream bitstream = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + item = context.reloadEntity(item); + + assertThat(bitstream.getMetadata(), hasItem(with("dc.type", type))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + + context.turnOffAuthorisationSystem(); + + type = "Thesis"; + this.bitstreamService.setMetadataSingleValue(context, bitstream, + FileTypeMetadataEnhancerConsumer.entityTypeMetadata, null, type); + this.bitstreamService.update(context, bitstream); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + item = context.reloadEntity(item); + + assertThat(bitstream.getMetadata(), hasItem(with("dc.type", type))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + } + + @Test + public void testWithTypeDeleted() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + final String type = "Publication"; + context.turnOffAuthorisationSystem(); + Item item = + ItemBuilder + .createItem(context, collection) + .build(); + Bitstream bitstream = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = 
context.reloadEntity(bitstream); + final MetadataValue entityType = bitstream.getMetadata() + .stream() + .filter(metadataFilter(FileTypeMetadataEnhancerConsumer.entityTypeMetadata)) + .findFirst() + .orElseThrow(); + bitstreamService.removeMetadataValues(context, bitstream, List.of(entityType)); + context.turnOffAuthorisationSystem(); + + this.bitstreamService.update(context, bitstream); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + item = context.reloadEntity(item); + + assertThat(bitstream.getMetadata(), not(hasItem(withField("dc.type")))); + assertThat(item.getMetadata(),not(hasItem(withField("dspace.file.type")))); + } + + @Test + public void testWithMultipleEntityType() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + final String type = "Publication"; + final String type1 = "Thesis"; + context.turnOffAuthorisationSystem(); + final Item item = + ItemBuilder + .createItem(context, collection) + .build(); + Bitstream bitstream = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type) + .build(); + final Bitstream bitstream1 = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type1) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + + assertThat(bitstream.getMetadata(), hasItem(with("dc.type", type))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(bitstream1.getMetadata(), hasItem(with("dc.type", type1))); + assertThat(bitstream1.getMetadata(), not(hasItem(with("dspace.file.type", type1)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type1)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type, null, 0, -1))); + 
assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type1, null, 1, -1))); + } + + @Test + public void testWithMultipleEntityTypeEdited() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + String type = "Publication"; + String type1 = "Thesis"; + context.turnOffAuthorisationSystem(); + Item item = + ItemBuilder + .createItem(context, collection) + .build(); + Bitstream bitstream = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type) + .build(); + Bitstream bitstream1 = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type1) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + bitstream1 = context.reloadEntity(bitstream1); + + assertThat(bitstream.getMetadata(), hasItem(with("dc.type", type))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(bitstream1.getMetadata(), hasItem(with("dc.type", type1))); + assertThat(bitstream1.getMetadata(), not(hasItem(with("dspace.file.type", type1)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type1)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type, null, 0, -1))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type1, null, 1, -1))); + + context.turnOffAuthorisationSystem(); + + type = "Journal"; + this.bitstreamService.setMetadataSingleValue( + context, + bitstream, + FileTypeMetadataEnhancerConsumer.entityTypeMetadata, + null, + type + ); + this.bitstreamService.update(context, bitstream); + + type1 = "Journal Article"; + this.bitstreamService.setMetadataSingleValue( + context, + bitstream1, + FileTypeMetadataEnhancerConsumer.entityTypeMetadata, + null, + type1 + ); + this.bitstreamService.update(context, bitstream1); + + 
context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + bitstream1 = context.reloadEntity(bitstream1); + item = context.reloadEntity(item); + + assertThat(bitstream.getMetadata(), hasItem(with("dc.type", type))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(bitstream1.getMetadata(), hasItem(with("dc.type", type1))); + assertThat(bitstream1.getMetadata(), not(hasItem(with("dspace.file.type", type1)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type1)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type, null, 0, -1))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type1, null, 1, -1))); + } + + @Test + public void testWithMultipleEntityTypeDelete() + throws FileNotFoundException, SQLException, AuthorizeException, IOException, ParseException { + final String type = "Publication"; + final String type1 = "Thesis"; + context.turnOffAuthorisationSystem(); + Item item = + ItemBuilder + .createItem(context, collection) + .build(); + Bitstream bitstream = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type) + .build(); + Bitstream bitstream1 = + BitstreamBuilder + .createBitstream(context, item, new StringInputStream("test")) + .withType(type1) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + bitstream1 = context.reloadEntity(bitstream1); + + assertThat(bitstream.getMetadata(), hasItem(with("dc.type", type))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(bitstream1.getMetadata(), hasItem(with("dc.type", type1))); + assertThat(bitstream1.getMetadata(), not(hasItem(with("dspace.file.type", type1)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + 
assertThat(item.getMetadata(), not(hasItem(with("dc.type", type1)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type, null, 0, -1))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type1, null, 1, -1))); + + context.turnOffAuthorisationSystem(); + + this.bitstreamService.clearMetadata( + context, + bitstream, + FileTypeMetadataEnhancerConsumer.entityTypeMetadata.schema, + FileTypeMetadataEnhancerConsumer.entityTypeMetadata.element, + FileTypeMetadataEnhancerConsumer.entityTypeMetadata.qualifier, + null + ); + this.bitstreamService.update(context, bitstream); + + context.restoreAuthSystemState(); + context.commit(); + + bitstream = context.reloadEntity(bitstream); + bitstream1 = context.reloadEntity(bitstream1); + item = context.reloadEntity(item); + + assertThat(bitstream.getMetadata(), not(hasItem(with("dc.type", type)))); + assertThat(bitstream.getMetadata(), not(hasItem(with("dspace.file.type", type)))); + assertThat(bitstream1.getMetadata(), hasItem(with("dc.type", type1))); + assertThat(bitstream1.getMetadata(), not(hasItem(with("dspace.file.type", type1)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type)))); + assertThat(item.getMetadata(), not(hasItem(with("dc.type", type1)))); + assertThat(item.getMetadata(), not(hasItem(with("dspace.file.type", type, null, 0, -1)))); + assertThat(item.getMetadata(), not(hasItem(with("dspace.file.type", type1, null, 1, -1)))); + assertThat(item.getMetadata(), hasItem(with("dspace.file.type", type1, null, 0, -1))); + } + + private Predicate metadataFilter(MetadataFieldName metadataField) { + return metadata -> + StringUtils.equals(metadataField.schema, metadata.getSchema()) && + StringUtils.equals(metadataField.element, metadata.getElement()) && + StringUtils.equals(metadataField.qualifier, metadata.getQualifier()); + } + + public static Matcher withField(final String field) { + return new TypeSafeMatcher() { + + @Override + protected void 
describeMismatchSafely(MetadataValue metadataValue, Description description) { + description.appendText("was ").appendValue(metadataValue.getMetadataField().toString('.')); + } + + @Override + public void describeTo(Description description) { + description.appendText("MetadataValue with id ").appendValue(field); + } + + @Override + protected boolean matchesSafely(MetadataValue metadataValue) { + return Objects.equal(field, metadataValue.getMetadataField().toString('.')); + } + }; + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java index cee6ebcfb0e3..423e7186e623 100644 --- a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.File; import java.nio.file.Files; @@ -33,6 +34,7 @@ import org.dspace.content.service.RelationshipService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.flywaydb.core.internal.util.ExceptionUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -46,6 +48,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private static final String ZIP_NAME = "saf.zip"; + private static final String PDF_NAME = "test.pdf"; private static final String publicationTitle = "A Tale of Two Cities"; private static final String personTitle = "Person Test"; @@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private Collection collection; private Path tempDir; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -226,6 +230,10 @@ public void importItemByZipSafWithBitstreams() throws 
Exception { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test @@ -254,6 +262,23 @@ public void importItemByZipSafWithRelationships() throws Exception { checkRelationship(); } + @Test + public void importItemByZipSafInvalidMimetype() throws Exception { + // use sample PDF file + Files.copy(getClass().getResourceAsStream("test.pdf"), + Path.of(tempDir.toString() + "/" + PDF_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + + "/mapfile.out" }; + try { + perfomImportScript(args); + } catch (Exception e) { + // should throw an exception due to invalid mimetype + assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass()); + } + } + @Test public void resumeImportItemBySafWithMetadataOnly() throws Exception { // create simple SAF diff --git a/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java b/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java index 4b804a131410..b208630b602b 100644 --- a/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java +++ b/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java @@ -21,7 +21,7 @@ import org.hamcrest.TypeSafeMatcher; /** - * Implementation of {@link org.hamcrest.Matcher} to match a ResourcePolicy. + * Implementation of {@link Matcher} to match a ResourcePolicy. 
* * @author Luca Giamminonni (luca.giamminonni at 4science.it) * diff --git a/dspace-api/src/test/java/org/dspace/app/mediafilter/MediaFilterIT.java b/dspace-api/src/test/java/org/dspace/app/mediafilter/MediaFilterIT.java new file mode 100644 index 000000000000..aef2476fdc45 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/mediafilter/MediaFilterIT.java @@ -0,0 +1,237 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.junit.Before; +import org.junit.Test; + +/** + * Tests of {@link MediaFilterScript}. 
+ * + * @author Andrea Bollini + */ +public class MediaFilterIT extends AbstractIntegrationTestWithDatabase { + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + protected Community topComm1; + protected Community topComm2; + protected Community childComm1_1; + protected Community childComm1_2; + protected Collection col1_1; + protected Collection col1_2; + protected Collection col1_1_1; + protected Collection col1_1_2; + protected Collection col1_2_1; + protected Collection col1_2_2; + protected Collection col2_1; + protected Item item1_1_a; + protected Item item1_1_b; + protected Item item1_2_a; + protected Item item1_2_b; + protected Item item1_1_1_a; + protected Item item1_1_1_b; + protected Item item1_1_2_a; + protected Item item1_1_2_b; + protected Item item1_2_1_a; + protected Item item1_2_1_b; + protected Item item1_2_2_a; + protected Item item1_2_2_b; + protected Item item2_1_a; + protected Item item2_1_b; + + @Before + public void setup() throws IOException, SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + topComm1 = CommunityBuilder.createCommunity(context).withName("Parent Community1").build(); + topComm2 = CommunityBuilder.createCommunity(context).withName("Parent Community2").build(); + childComm1_1 = CommunityBuilder.createCommunity(context).withName("Child Community1_1") + .addParentCommunity(context, topComm1).build(); + childComm1_2 = CommunityBuilder.createCommunity(context).withName("Child Community1_2") + .addParentCommunity(context, topComm1).build(); + col1_1 = CollectionBuilder.createCollection(context, topComm1).withName("Collection 1_1").build(); + col1_2 = CollectionBuilder.createCollection(context, topComm1).withName("Collection 1_2").build(); + col1_1_1 = CollectionBuilder.createCollection(context, childComm1_1).withName("Collection 1_1_1").build(); + col1_1_2 = 
CollectionBuilder.createCollection(context, childComm1_1).withName("Collection 1_1_2").build(); + col1_2_1 = CollectionBuilder.createCollection(context, childComm1_2).withName("Collection 1_1_1").build(); + col1_2_2 = CollectionBuilder.createCollection(context, childComm1_2).withName("Collection 1_2").build(); + col2_1 = CollectionBuilder.createCollection(context, topComm2).withName("Collection 2_1").build(); + + // Create two items in each collection, one with the test.csv file and one with the test.txt file + item1_1_a = ItemBuilder.createItem(context, col1_1).withTitle("Item 1_1_a").withIssueDate("2017-10-17").build(); + item1_1_b = ItemBuilder.createItem(context, col1_1).withTitle("Item 1_1_b").withIssueDate("2017-10-17").build(); + item1_1_1_a = ItemBuilder.createItem(context, col1_1_1).withTitle("Item 1_1_1_a").withIssueDate("2017-10-17") + .build(); + item1_1_1_b = ItemBuilder.createItem(context, col1_1_1).withTitle("Item 1_1_1_b").withIssueDate("2017-10-17") + .build(); + item1_1_2_a = ItemBuilder.createItem(context, col1_1_2).withTitle("Item 1_1_2_a").withIssueDate("2017-10-17") + .build(); + item1_1_2_b = ItemBuilder.createItem(context, col1_1_2).withTitle("Item 1_1_2_b").withIssueDate("2017-10-17") + .build(); + item1_2_a = ItemBuilder.createItem(context, col1_2).withTitle("Item 1_2_a").withIssueDate("2017-10-17").build(); + item1_2_b = ItemBuilder.createItem(context, col1_2).withTitle("Item 1_2_b").withIssueDate("2017-10-17").build(); + item1_2_1_a = ItemBuilder.createItem(context, col1_2_1).withTitle("Item 1_2_1_a").withIssueDate("2017-10-17") + .build(); + item1_2_1_b = ItemBuilder.createItem(context, col1_2_1).withTitle("Item 1_2_1_b").withIssueDate("2017-10-17") + .build(); + item1_2_2_a = ItemBuilder.createItem(context, col1_2_2).withTitle("Item 1_2_2_a").withIssueDate("2017-10-17") + .build(); + item1_2_2_b = ItemBuilder.createItem(context, col1_2_2).withTitle("Item 1_2_2_b").withIssueDate("2017-10-17") + .build(); + item2_1_a = 
ItemBuilder.createItem(context, col2_1).withTitle("Item 2_1_a").withIssueDate("2017-10-17").build(); + item2_1_b = ItemBuilder.createItem(context, col2_1).withTitle("Item 2_1_b").withIssueDate("2017-10-17").build(); + addBitstream(item1_1_a, "test.csv"); + addBitstream(item1_1_b, "test.txt"); + addBitstream(item1_2_a, "test.csv"); + addBitstream(item1_2_b, "test.txt"); + addBitstream(item1_1_1_a, "test.csv"); + addBitstream(item1_1_1_b, "test.txt"); + addBitstream(item1_1_2_a, "test.csv"); + addBitstream(item1_1_2_b, "test.txt"); + addBitstream(item1_2_1_a, "test.csv"); + addBitstream(item1_2_1_b, "test.txt"); + addBitstream(item1_2_2_a, "test.csv"); + addBitstream(item1_2_2_b, "test.txt"); + addBitstream(item2_1_a, "test.csv"); + addBitstream(item2_1_b, "test.txt"); + context.restoreAuthSystemState(); + } + + private void addBitstream(Item item, String filename) throws SQLException, AuthorizeException, IOException { + BitstreamBuilder.createBitstream(context, item, getClass().getResourceAsStream(filename)).withName(filename) + .guessFormat().build(); + } + + @Test + public void mediaFilterScriptAllItemsTest() throws Exception { + performMediaFilterScript(null); + Iterator items = itemService.findAll(context); + while (items.hasNext()) { + Item item = items.next(); + checkItemHasBeenProcessed(item); + } + } + + @Test + public void mediaFilterScriptIdentifiersTest() throws Exception { + // process the item 1_1_a and verify that no other items has been processed using the "closer" one + performMediaFilterScript(item1_1_a); + checkItemHasBeenProcessed(item1_1_a); + checkItemHasBeenNotProcessed(item1_1_b); + // process the collection 1_1_1 and verify that items in another collection has not been processed + performMediaFilterScript(col1_1_1); + checkItemHasBeenProcessed(item1_1_1_a); + checkItemHasBeenProcessed(item1_1_1_b); + checkItemHasBeenNotProcessed(item1_1_2_a); + checkItemHasBeenNotProcessed(item1_1_2_b); + // process a top community with only collections + 
performMediaFilterScript(topComm2); + checkItemHasBeenProcessed(item2_1_a); + checkItemHasBeenProcessed(item2_1_b); + // verify that the other items have not been processed yet + checkItemHasBeenNotProcessed(item1_1_b); + checkItemHasBeenNotProcessed(item1_2_a); + checkItemHasBeenNotProcessed(item1_2_b); + checkItemHasBeenNotProcessed(item1_1_2_a); + checkItemHasBeenNotProcessed(item1_1_2_b); + checkItemHasBeenNotProcessed(item1_2_1_a); + checkItemHasBeenNotProcessed(item1_2_1_b); + checkItemHasBeenNotProcessed(item1_2_2_a); + checkItemHasBeenNotProcessed(item1_2_2_b); + // process a more structured community and verify that all the items at all levels are processed + performMediaFilterScript(topComm1); + // items that were already processed should stay processed + checkItemHasBeenProcessed(item1_1_a); + checkItemHasBeenProcessed(item1_1_1_a); + checkItemHasBeenProcessed(item1_1_1_b); + // residual items should have been processed as well now + checkItemHasBeenProcessed(item1_1_b); + checkItemHasBeenProcessed(item1_2_a); + checkItemHasBeenProcessed(item1_2_b); + checkItemHasBeenProcessed(item1_1_2_a); + checkItemHasBeenProcessed(item1_1_2_b); + checkItemHasBeenProcessed(item1_2_1_a); + checkItemHasBeenProcessed(item1_2_1_b); + checkItemHasBeenProcessed(item1_2_2_a); + checkItemHasBeenProcessed(item1_2_2_b); + } + + private void checkItemHasBeenNotProcessed(Item item) throws IOException, SQLException, AuthorizeException { + List textBundles = item.getBundles("TEXT"); + assertTrue("The item " + item.getName() + " should NOT have the TEXT bundle", textBundles.size() == 0); + } + + private void checkItemHasBeenProcessed(Item item) throws IOException, SQLException, AuthorizeException { + String expectedFileName = StringUtils.endsWith(item.getName(), "_a") ? "test.csv.txt" : "test.txt.txt"; + String expectedContent = StringUtils.endsWith(item.getName(), "_a") ? 
"data3,3" : "quick brown fox"; + List textBundles = item.getBundles("TEXT"); + assertTrue("The item " + item.getName() + " has NOT the TEXT bundle", textBundles.size() == 1); + List bitstreams = textBundles.get(0).getBitstreams(); + assertTrue("The item " + item.getName() + " has NOT exactly 1 bitstream in the TEXT bundle", + bitstreams.size() == 1); + assertTrue("The text bistream in the " + item.getName() + " is NOT named properly [" + expectedFileName + "]", + StringUtils.equals(bitstreams.get(0).getName(), expectedFileName)); + assertTrue("The text bistream in the " + item.getName() + " doesn't contain the proper content [" + + expectedContent + "]", StringUtils.contains(getContent(bitstreams.get(0)), expectedContent)); + } + + private CharSequence getContent(Bitstream bitstream) throws IOException, SQLException, AuthorizeException { + try (InputStream input = bitstreamService.retrieve(context, bitstream)) { + return IOUtils.toString(input, "UTF-8"); + } + } + + private void performMediaFilterScript(DSpaceObject dso) throws Exception { + if (dso != null) { + runDSpaceScript("filter-media", "-i", dso.getHandle()); + } else { + runDSpaceScript("filter-media"); + } + // reload our items to see the changes + item1_1_a = context.reloadEntity(item1_1_a); + item1_1_b = context.reloadEntity(item1_1_b); + item1_2_a = context.reloadEntity(item1_2_a); + item1_2_b = context.reloadEntity(item1_2_b); + item1_1_1_a = context.reloadEntity(item1_1_1_a); + item1_1_1_b = context.reloadEntity(item1_1_1_b); + item1_1_2_a = context.reloadEntity(item1_1_2_a); + item1_1_2_b = context.reloadEntity(item1_1_2_b); + item1_2_1_a = context.reloadEntity(item1_2_1_a); + item1_2_1_b = context.reloadEntity(item1_2_1_b); + item1_2_2_a = context.reloadEntity(item1_2_2_a); + item1_2_2_b = context.reloadEntity(item1_2_2_b); + item2_1_a = context.reloadEntity(item2_1_a); + item2_1_b = context.reloadEntity(item2_1_b); + + } +} diff --git 
a/dspace-api/src/test/java/org/dspace/app/metadata/export/MetadataSchemaExportScriptIT.java b/dspace-api/src/test/java/org/dspace/app/metadata/export/MetadataSchemaExportScriptIT.java new file mode 100644 index 000000000000..6ed2279bb1fa --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/metadata/export/MetadataSchemaExportScriptIT.java @@ -0,0 +1,143 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.metadata.export; + +import static org.dspace.app.launcher.ScriptLauncher.handleScript; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; + +import java.io.File; +import java.io.FileInputStream; +import java.nio.charset.Charset; +import java.sql.SQLException; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.MetadataFieldBuilder; +import org.dspace.builder.MetadataSchemaBuilder; +import org.dspace.content.MetadataSchema; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + + +/** + * Integration tests for {@link MetadataSchemaExportScript} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class MetadataSchemaExportScriptIT extends AbstractIntegrationTestWithDatabase { + + private final ConfigurationService configurationService = + 
DSpaceServicesFactory.getInstance().getConfigurationService(); + + private MetadataSchema schema; + private List fields; + private String fileLocation; + + @Before + @SuppressWarnings("deprecation") + public void beforeTests() throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + schema = createMetadataSchema(); + fields = createFields(); + fileLocation = configurationService.getProperty("dspace.dir"); + context.restoreAuthSystemState(); + } + + private List createFields() throws SQLException, AuthorizeException { + return List.of( + MetadataFieldBuilder.createMetadataField(context, schema, "first", "metadata", "notes first"), + MetadataFieldBuilder.createMetadataField(context, schema, "second", "metadata", "notes second"), + MetadataFieldBuilder.createMetadataField(context, schema, "third", "metadata", "notes third"), + MetadataFieldBuilder.createMetadataField(context, schema, "element", null, null) + ); + } + + private MetadataSchema createMetadataSchema() throws SQLException, AuthorizeException { + return MetadataSchemaBuilder.createMetadataSchema(context, "test", "http://dspace.org/test").build(); + } + + @Test + public void testMetadataSchemaExport() throws Exception { + + File xml = new File(fileLocation + "/test-types.xml"); + xml.deleteOnExit(); + + String[] args = + new String[] { + "export-schema", + "-i", schema.getID().toString(), + "-f", xml.getAbsolutePath() + }; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); + + assertThat(handler.getErrorMessages(), empty()); + assertThat( + handler.getInfoMessages(), + hasItem("Exporting the metadata-schema file for the schema " + schema.getName()) + ); + assertThat("The xml file should be created", xml.exists(), is(true)); + + + try (FileInputStream fis = new FileInputStream(xml)) { + String content = IOUtils.toString(fis, Charset.defaultCharset()); + assertThat(content, 
containsString("")); + assertThat(content, containsString("test")); + assertThat(content, containsString("http://dspace.org/test")); + assertThat(content, containsString("")); + assertThat(content, containsString("test")); + assertThat(content, containsString("first")); + assertThat(content, containsString("metadata")); + assertThat(content, containsString("notes first")); + assertThat(content, containsString("")); + assertThat(content, containsString("")); + assertThat(content, containsString("test")); + assertThat(content, containsString("third")); + assertThat(content, containsString("metadata")); + assertThat(content, containsString("notes third")); + assertThat(content, containsString("")); + assertThat(content, containsString("")); + assertThat(content, containsString("test")); + assertThat(content, containsString("element")); + assertThat(content, containsString("")); + } + } + + @Test + public void testMetadataNotExistingSchemaExport() throws Exception { + + File xml = new File(fileLocation + "/test-types.xml"); + xml.deleteOnExit(); + + String[] args = + new String[] { + "export-schema", + "-i", "-1", + "-f", xml.getAbsolutePath() + }; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); + + assertThat(handler.getErrorMessages(), hasItem("Cannot find the metadata-schema with id: -1")); + assertThat("The xml file should not be created", xml.exists(), is(false)); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java b/dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java new file mode 100644 index 000000000000..96cf00c312ba --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the 
source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import javax.mail.Address; +import javax.mail.Message; +import javax.mail.MessagingException; +import javax.mail.Session; +import javax.mail.Transport; +import javax.mail.URLName; + +/** + * A dummy load for SMTP transport, which saves the last message "sent" for + * later inspection. See the {@link getMessage()} and {@link getAddresses()} + * methods for access to the message. Sending a new message through an instance + * of this Transport discards the previous message. + * + *

      This class is not thread-safe. + * + * @author mwood + */ +public class JavaMailTestTransport + extends Transport { + private static Message msg; + private static Address[] adrss; + + public JavaMailTestTransport(Session session, URLName urlname) { + super(session, urlname); + } + + @Override + public void sendMessage(Message aMsg, Address[] aAdrss) + throws MessagingException { + msg = aMsg; + adrss = aAdrss; + } + + @Override + public void connect(String host, int port, String user, String password) { } + + /* *** Implementation-specific methods. *** */ + + /** + * Access the most recent saved message. + * + * @return saved message. + */ + public static Message getMessage() { + return msg; + } + + /** + * Access the most recent saved addresses. + * + * @return saved addresses. + */ + public static Address[] getAddresses() { + return adrss; + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java new file mode 100644 index 000000000000..713e007c58a2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java @@ -0,0 +1,271 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; +import static org.junit.Assert.assertEquals; + +import javax.mail.Address; +import javax.mail.Message; +import javax.mail.Provider; +import javax.mail.Session; +import javax.mail.internet.InternetAddress; + +import org.dspace.AbstractUnitTest; +import org.dspace.app.requestitem.factory.RequestItemServiceFactory; +import 
org.dspace.app.requestitem.service.RequestItemService; +import org.dspace.builder.AbstractBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +/** + * Tests for {@link RequestItemEmailNotifier}. + * + * @author mwood + */ +public class RequestItemEmailNotifierTest + extends AbstractUnitTest { + + public static final String TRANSPORT_CLASS_KEY = "mail.smtp.class"; + + private static final String REQUESTOR_ADDRESS = "mhwood@wood.net"; + private static final String REQUESTOR_NAME = "Mark Wood"; + private static final String HELPDESK_ADDRESS = "help@example.com"; + private static final String HELPDESK_NAME = "Help Desk"; + private static final String TEST_MESSAGE = "Message"; + private static final String DUMMY_PROTO = "dummy"; + + private static ConfigurationService configurationService; + private static BitstreamService bitstreamService; + private static HandleService handleService; + private static RequestItemService requestItemService; + + public RequestItemEmailNotifierTest() { + super(); + } + + @BeforeClass + public static void setUpClass() { + AbstractBuilder.init(); // AbstractUnitTest doesn't do this for us. 
+ + configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + bitstreamService + = ContentServiceFactory.getInstance().getBitstreamService(); + handleService + = HandleServiceFactory.getInstance().getHandleService(); + requestItemService + = RequestItemServiceFactory.getInstance().getRequestItemService(); + } + + /** + * Test of sendRequest method, of class RequestItemEmailNotifier. + * @throws java.lang.Exception passed through. + */ + @Ignore + @Test + public void testSendRequest() throws Exception { + } + + /** + * Test of sendResponse method, of class RequestItemEmailNotifier. + * @throws java.lang.Exception passed through. + */ + @Test + public void testSendResponse() throws Exception { + // Create some content to send. + context.turnOffAuthorisationSystem(); + Community com = CommunityBuilder.createCommunity(context) + .withName("Top Community") + .build(); + Collection col = CollectionBuilder.createCollection(context, com) + .build(); + Item item = ItemBuilder.createItem(context, col) + .withTitle("Test Item") + .build(); + context.restoreAuthSystemState(); + + // Create a request to which we can respond. + RequestItem ri = new RequestItem(); + ri.setAccept_request(true); + ri.setItem(item); + ri.setAllfiles(true); + ri.setReqEmail(REQUESTOR_ADDRESS); + ri.setReqName(REQUESTOR_NAME); + + // Install a fake transport for RFC2822 email addresses. + Session session = DSpaceServicesFactory.getInstance().getEmailService().getSession(); + Provider transportProvider = new Provider(Provider.Type.TRANSPORT, + DUMMY_PROTO, JavaMailTestTransport.class.getCanonicalName(), + "DSpace", "1.0"); + session.addProvider(transportProvider); + session.setProvider(transportProvider); + session.setProtocolForAddress("rfc822", DUMMY_PROTO); + + // Configure the help desk strategy. 
+ configurationService.setProperty("mail.helpdesk", HELPDESK_ADDRESS); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + + // Ensure that mail is "sent". + configurationService.setProperty("mail.server.disabled", "false"); + + // Instantiate and initialize the unit, using the "help desk" strategy. + RequestItemEmailNotifier requestItemEmailNotifier + = new RequestItemEmailNotifier( + DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), + RequestItemAuthorExtractor.class)); + requestItemEmailNotifier.bitstreamService = bitstreamService; + requestItemEmailNotifier.configurationService = configurationService; + requestItemEmailNotifier.handleService = handleService; + requestItemEmailNotifier.requestItemService = requestItemService; + + // Test the unit. Template supplies the Subject: value + requestItemEmailNotifier.sendResponse(context, ri, null, TEST_MESSAGE); + + // Evaluate the test results. + + // Check the To: address. + Address[] myAddresses = JavaMailTestTransport.getAddresses(); + assertEquals("Should have one To: address.", + myAddresses.length, 1); + assertThat("To: should be an Internet address", + myAddresses[0], instanceOf(InternetAddress.class)); + String address = ((InternetAddress)myAddresses[0]).getAddress(); + assertEquals("To: address should match requestor.", + ri.getReqEmail(), address); + + // Check the message body. 
+ Message myMessage = JavaMailTestTransport.getMessage(); + + Object content = myMessage.getContent(); + assertThat("Body should be a single text bodypart", + content, instanceOf(String.class)); + + assertThat("Should contain the helpdesk name", + (String)content, containsString(HELPDESK_NAME)); + + assertThat("Should contain the test custom message", + (String)content, containsString(TEST_MESSAGE)); + } + + /** + * Test of sendResponse method -- rejection case. + * @throws java.lang.Exception passed through. + */ + @Test + public void testSendRejection() + throws Exception { + // Create some content to send. + context.turnOffAuthorisationSystem(); + Community com = CommunityBuilder.createCommunity(context) + .withName("Top Community") + .build(); + Collection col = CollectionBuilder.createCollection(context, com) + .build(); + Item item = ItemBuilder.createItem(context, col) + .withTitle("Test Item") + .build(); + context.restoreAuthSystemState(); + + // Create a request to which we can respond. + RequestItem ri = new RequestItem(); + ri.setAccept_request(false); + ri.setItem(item); + ri.setAllfiles(true); + ri.setReqEmail(REQUESTOR_ADDRESS); + ri.setReqName(REQUESTOR_NAME); + + // Install a fake transport for RFC2822 email addresses. + Session session = DSpaceServicesFactory.getInstance().getEmailService().getSession(); + Provider transportProvider = new Provider(Provider.Type.TRANSPORT, + DUMMY_PROTO, JavaMailTestTransport.class.getCanonicalName(), + "DSpace", "1.0"); + session.addProvider(transportProvider); + session.setProvider(transportProvider); + session.setProtocolForAddress("rfc822", DUMMY_PROTO); + + // Configure the help desk strategy. + configurationService.setProperty("mail.helpdesk", HELPDESK_ADDRESS); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + + // Ensure that mail is "sent". 
+ configurationService.setProperty("mail.server.disabled", "false"); + + // Instantiate and initialize the unit, using the "help desk" strategy. + RequestItemEmailNotifier requestItemEmailNotifier + = new RequestItemEmailNotifier( + DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), + RequestItemAuthorExtractor.class)); + requestItemEmailNotifier.bitstreamService = bitstreamService; + requestItemEmailNotifier.configurationService = configurationService; + requestItemEmailNotifier.handleService = handleService; + requestItemEmailNotifier.requestItemService = requestItemService; + + // Test the unit. Template supplies the Subject: value + requestItemEmailNotifier.sendResponse(context, ri, null, TEST_MESSAGE); + + // Evaluate the test results. + + // Check the To: address. + Address[] myAddresses = JavaMailTestTransport.getAddresses(); + assertEquals("Should have one To: address.", + myAddresses.length, 1); + assertThat("To: should be an Internet address", + myAddresses[0], instanceOf(InternetAddress.class)); + String address = ((InternetAddress)myAddresses[0]).getAddress(); + assertEquals("To: address should match requestor.", + ri.getReqEmail(), address); + + // Check the message body. + Message myMessage = JavaMailTestTransport.getMessage(); + + Object content = myMessage.getContent(); + assertThat("Body should be a single text bodypart", + content, instanceOf(String.class)); + + assertThat("Should contain the helpdesk name", + (String)content, containsString(HELPDESK_NAME)); + + assertThat("Should contain the test custom message", + (String)content, containsString(TEST_MESSAGE)); + + // FIXME Note that this depends on the content of the rejection template! + assertThat("Should contain the word 'denied'.", + (String)content, containsString("denied")); + } + + /** + * Test of requestOpenAccess method, of class RequestItemEmailNotifier. + * @throws java.lang.Exception passed through. 
+ */ + @Ignore + @Test + public void testRequestOpenAccess() throws Exception { + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java index be4d6a12dae2..cb1f828b93c4 100644 --- a/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java +++ b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java @@ -14,6 +14,7 @@ import java.util.List; import org.dspace.AbstractUnitTest; +import org.dspace.submit.factory.SubmissionServiceFactory; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -65,7 +66,8 @@ public void testReadAndProcessTypeBindSubmissionConfig() // Get submission configuration SubmissionConfig submissionConfig = - new SubmissionConfigReader().getSubmissionConfigByCollection(typeBindHandle); + SubmissionServiceFactory.getInstance().getSubmissionConfigService() + .getSubmissionConfigByCollection(typeBindHandle); // Submission name should match name defined in item-submission.xml assertEquals(typeBindSubmissionName, submissionConfig.getSubmissionName()); // Step 0 - our process only has one step. 
It should not be null and have the ID typebindtest diff --git a/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java b/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java index 562aa86a585e..88c29fd23344 100644 --- a/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java +++ b/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java @@ -51,6 +51,14 @@ public InputStream answer(InvocationOnMock invocation) { } }); + when(orcidRestConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) + .thenAnswer(new Answer() { + @Override + public InputStream answer(InvocationOnMock invocation) { + return this.getClass().getResourceAsStream("orcid-record.xml"); + } + }); + setOrcidRestConnector(orcidRestConnector); } diff --git a/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java similarity index 97% rename from dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java rename to dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java index df333fa500c9..7286fb8e8374 100644 --- a/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java +++ b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java @@ -26,7 +26,7 @@ * @author Luca Giamminonni (luca.giamminonni at 4science.it) */ @RunWith(MockitoJUnitRunner.class) -public class RegexPasswordValidatorTest extends AbstractIntegrationTest { +public class RegexPasswordValidatorIT extends AbstractIntegrationTest { @Mock private ConfigurationService configurationService; diff --git a/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java b/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java new file mode 100644 index 000000000000..83aab72d904e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java @@ -0,0 +1,103 @@ +/** 
+ * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.browse; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.dspace.AbstractDSpaceTest; +import org.dspace.services.ConfigurationService; +import org.dspace.utils.DSpace; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CrossLinks} + */ +public class CrossLinksTest extends AbstractDSpaceTest { + protected ConfigurationService configurationService; + + + @Before + public void setUp() { + configurationService = new DSpace().getConfigurationService(); + } + + @Test + public void testFindLinkType_Null() throws Exception { + CrossLinks crossLinks = new CrossLinks(); + assertNull(crossLinks.findLinkType(null)); + } + + @Test + public void testFindLinkType_NoMatch() throws Exception { + CrossLinks crossLinks = new CrossLinks(); + String metadataField = "foo.bar.baz.does.not.exist"; + assertNull(crossLinks.findLinkType(metadataField)); + } + + @Test + public void testFindLinkType_WildcardMatch() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + CrossLinks crossLinks = new CrossLinks(); + + String metadataField = "dc.contributor.author"; + assertEquals("author",crossLinks.findLinkType(metadataField)); + } + + @Test + public void testFindLinkType_SingleExactMatch_Author() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("type",crossLinks.findLinkType("dc.genre")); + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + } + + @Test + public void testFindLinkType_SingleExactMatch_Type() throws Exception { + 
configurationService.setProperty("webui.browse.link.1", "type:dc.genre"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("type",crossLinks.findLinkType("dc.genre")); + } + + @Test + public void testFindLinkType_MultipleExactMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author"); + configurationService.setProperty("webui.browse.link.2", "type:dc.genre"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("type",crossLinks.findLinkType("dc.genre")); + } + + @Test + public void testFindLinkType_MultipleWildcardMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("subject",crossLinks.findLinkType("dc.subject.lcsh")); + } + + @Test + public void testFindLinkType_MultiplExactAndWildcardMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*"); + configurationService.setProperty("webui.browse.link.3", "type:dc.genre"); + configurationService.setProperty("webui.browse.link.4", "dateissued:dc.date.issued"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("subject",crossLinks.findLinkType("dc.subject.lcsh")); + assertEquals("type",crossLinks.findLinkType("dc.genre")); + assertEquals("dateissued",crossLinks.findLinkType("dc.date.issued")); + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java 
b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java index c2a2e52d7db7..8bfe47e5a8c2 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java @@ -20,6 +20,7 @@ import org.dspace.app.requestitem.factory.RequestItemServiceFactory; import org.dspace.app.requestitem.service.RequestItemService; import org.dspace.app.suggestion.SolrSuggestionStorageService; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; @@ -64,6 +65,8 @@ import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.service.ProcessService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.dspace.supervision.factory.SupervisionOrderServiceFactory; import org.dspace.supervision.service.SupervisionOrderService; import org.dspace.utils.DSpace; @@ -132,6 +135,7 @@ public abstract class AbstractBuilder { static OrcidQueueService orcidQueueService; static OrcidTokenService orcidTokenService; static SystemWideAlertService systemWideAlertService; + static SubmissionConfigService submissionConfigService; static SupervisionOrderService supervisionOrderService; @@ -206,6 +210,11 @@ public static void init() { orcidTokenService = OrcidServiceFactory.getInstance().getOrcidTokenService(); systemWideAlertService = DSpaceServicesFactory.getInstance().getServiceManager() .getServicesByType(SystemWideAlertService.class).get(0); + try { + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); + } catch (SubmissionConfigReaderException e) { + log.error(e.getMessage(), e); + } subscribeService = ContentServiceFactory.getInstance().getSubscribeService(); 
supervisionOrderService = SupervisionOrderServiceFactory.getInstance().getSupervisionOrderService(); } @@ -253,6 +262,7 @@ public static void destroy() { versioningService = null; orcidTokenService = null; systemWideAlertService = null; + submissionConfigService = null; subscribeService = null; supervisionOrderService = null; } diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index b3a41b703d80..4b1466a858d8 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -179,8 +179,8 @@ protected > B setOnlyReadPermission(DSp return (B) this; } /** - * Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other - * READ permissions will be removed + * Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson. 
+ * If another ADMIN policy is in place for an eperson it will be replaced * * @param dso * the DSpaceObject on which grant the permission diff --git a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java index 0aaa84c872aa..dbcf3a7972b7 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java @@ -17,7 +17,12 @@ import org.dspace.content.BitstreamFormat; import org.dspace.content.Bundle; import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.DSpaceObjectService; +import org.dspace.content.service.MetadataValueService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.Group; @@ -54,6 +59,13 @@ public static BitstreamBuilder createBitstream(Context context, Item item, Input return builder.createInRequestedBundle(context, item, is, bundleName); } + public static BitstreamBuilder createBitstream(Context context, Item item, InputStream is, + String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException, IOException { + BitstreamBuilder builder = new BitstreamBuilder(context); + return builder.createInRequestedBundleWithIiifDisabled(context, item, is, bundleName, iiifEnabled); + } + private BitstreamBuilder create(Context context, Item item, InputStream is) throws SQLException, AuthorizeException, IOException { this.context = context; @@ -87,6 +99,41 @@ private BitstreamBuilder createInRequestedBundle(Context context, Item item, Inp return this; } + private BitstreamBuilder createInRequestedBundleWithIiifDisabled(Context context, Item item, InputStream is, + String bundleName, boolean iiifEnabled) + throws SQLException, 
AuthorizeException, IOException { + this.context = context; + this.item = item; + + Bundle bundle = getBundleByNameAndIiiEnabled(item, bundleName, iiifEnabled); + + bitstream = bitstreamService.create(context, bundle, is); + + return this; + } + + private Bundle getBundleByNameAndIiiEnabled(Item item, String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException { + List bundles = itemService.getBundles(item, bundleName); + Bundle targetBundle = null; + + if (bundles.size() < 1) { + // not found, create a new one + targetBundle = bundleService.create(context, item, bundleName); + MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); + MetadataField iiifEnabledField = metadataFieldService. + findByString(context, "dspace.iiif.enabled", '.'); + MetadataValue metadataValue = metadataValueService.create(context, targetBundle, iiifEnabledField); + metadataValue.setValue(String.valueOf(iiifEnabled)); + + } else { + // put bitstreams into first bundle + targetBundle = bundles.iterator().next(); + } + return targetBundle; + } + + private Bundle getBundleByName(Item item, String bundleName) throws SQLException, AuthorizeException { List bundles = itemService.getBundles(item, bundleName); Bundle targetBundle = null; @@ -121,6 +168,19 @@ public BitstreamBuilder withMimeType(String mimeType) throws SQLException { return this; } + /** + * Guess the bitstream format as during the submission via the + * {@link BitstreamFormatService#guessFormat(Context, Bitstream)} + * + * @return the BitstreamBuilder with the format set according to + * {@link BitstreamFormatService#guessFormat(Context, Bitstream)} + * @throws SQLException + */ + public BitstreamBuilder guessFormat() throws SQLException { + bitstream.setFormat(context, bitstreamFormatService.guessFormat(context, bitstream)); + return this; + } + public BitstreamBuilder withFormat(String format) throws SQLException { bitstreamService.addMetadata(context, 
bitstream, "dc", "format", null, null, format); @@ -142,6 +202,11 @@ public BitstreamBuilder withType(String type) throws SQLException { return this; } + public BitstreamBuilder withIIIFDisabled() throws SQLException { + bitstreamService.addMetadata(context, bitstream, "dspace", "iiif", "enabled", null, "false"); + return this; + } + public BitstreamBuilder withIIIFLabel(String label) throws SQLException { bitstreamService.addMetadata(context, bitstream, "iiif", "label", null, null, label); return this; diff --git a/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java index c4a29bc9b015..2fee83afa924 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java @@ -7,6 +7,8 @@ */ package org.dspace.builder; +import static org.dspace.core.Constants.DEFAULT_ITEM_READ; + import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -15,6 +17,7 @@ import org.apache.commons.io.IOUtils; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.MetadataSchemaEnum; @@ -273,6 +276,28 @@ public CollectionBuilder withSharedWorkspace() { return setMetadataSingleValue(collection, "cris", "workspace", "shared", "true"); } + /** + * remove the resource policies with type DEFAULT_ITEM_READ and + * add new policy with type DEFAULT_ITEM_READ of + * the new group to current collection. + * + * @param group the group + * @return this builder + * @throws SQLException passed through. + * @throws AuthorizeException passed through. 
+ */ + public CollectionBuilder withDefaultItemRead(Group group) throws SQLException, AuthorizeException { + resourcePolicyService.removePolicies(context, collection, DEFAULT_ITEM_READ); + + ResourcePolicy resourcePolicy = resourcePolicyService.create(context); + resourcePolicy.setGroup(group); + resourcePolicy.setAction(DEFAULT_ITEM_READ); + resourcePolicy.setdSpaceObject(collection); + resourcePolicyService.update(context, resourcePolicy); + return this; + } + + @Override public Collection build() { try { diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index 02b6363b30b1..8db3b7ae9d66 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { private Community community; + protected CommunityBuilder(Context context) { super(context); } public static CommunityBuilder createCommunity(final Context context) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.create(); + return builder.create(null); + } + public static CommunityBuilder createCommunity(final Context context, String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.create(handle); } - private CommunityBuilder create() { - return createSubCommunity(context, null); + private CommunityBuilder create(String handle) { + return createSubCommunity(context, null, handle); } public static CommunityBuilder createSubCommunity(final Context context, final Community parent) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.createSub(parent); + return builder.createSub(parent, null); } - private CommunityBuilder createSub(final Community parent) { + public static CommunityBuilder createSubCommunity(final Context context, final Community parent, + final 
String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.createSub(parent, handle); + } + + private CommunityBuilder createSub(final Community parent, String handle) { try { - community = communityService.create(parent, context); + community = communityService.create(parent, context, handle); } catch (Exception e) { e.printStackTrace(); return null; @@ -106,6 +117,7 @@ public CommunityBuilder addParentCommunity(final Context context, final Communit @Override public Community build() { try { + communityService.update(context, community); context.dispatchEvents(); diff --git a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java index cf80c8778dce..59784b192ed4 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java @@ -19,7 +19,9 @@ import org.dspace.content.MetadataField; import org.dspace.core.Context; import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.Group; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutBoxTypes; import org.dspace.layout.CrisLayoutField; import org.dspace.layout.LayoutSecurity; @@ -163,6 +165,17 @@ public CrisLayoutBoxBuilder addMetadataSecurityField(MetadataField field) { return this; } + public CrisLayoutBoxBuilder addBox2SecurityGroups(Group group, CrisLayoutBox alternativeBox) throws SQLException { + if (this.box.getBox2SecurityGroups() == null) { + this.box.setBox2SecurityGroups(new HashSet<>()); + } + this.box.getBox2SecurityGroups().add( + new CrisLayoutBox2SecurityGroup(new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box, group), + box, group, alternativeBox) + ); + return this; + } + public CrisLayoutBoxBuilder withContainer(boolean container) { this.box.setContainer(container); return 
this; diff --git a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java index 70abf7b8ae9d..4736324f4d35 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java @@ -20,10 +20,12 @@ import org.dspace.content.MetadataField; import org.dspace.core.Context; import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.Group; import org.dspace.layout.CrisLayoutBox; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; +import org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.service.CrisLayoutTabService; @@ -117,6 +119,11 @@ public CrisLayoutTabBuilder withShortName(String shortName) { return this; } + public CrisLayoutTabBuilder withCustomFilter(String customFilter) { + this.tab.setCustomFilter(customFilter); + return this; + } + public CrisLayoutTabBuilder withHeader(String header) { this.tab.setHeader(header); return this; @@ -202,4 +209,15 @@ public CrisLayoutTabBuilder addMetadatasecurity(MetadataField metadataField) { this.tab.getMetadataSecurityFields().add(metadataField); return this; } + + public CrisLayoutTabBuilder addTab2SecurityGroups(Group group, CrisLayoutTab alternativeTab) { + if (this.tab.getTab2SecurityGroups() == null) { + this.tab.setTab2SecurityGroups(new HashSet<>()); + } + this.tab.getTab2SecurityGroups().add( + new CrisLayoutTab2SecurityGroup(new CrisLayoutTab2SecurityGroup.CrisLayoutTab2SecurityGroupId(tab, group), + tab, group, alternativeTab) + ); + return this; + } } diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index 0665441b3d2b..b9eedd2fcf42 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java 
+++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -848,9 +848,9 @@ public ItemBuilder withOtherIdentifier(String identifier) { } /** - * Create an admin group for the collection with the specified members + * Assign the admin permission to the specified eperson * - * @param ePerson eperson to add to the admin group + * @param ePerson the eperson that will get the ADMIN permission on the item * @return this builder * @throws SQLException * @throws AuthorizeException diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 86573940e416..0631e1b55a37 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -113,6 +113,9 @@ public void delete(Context c, Process dso) throws Exception { } public static void deleteProcess(Integer integer) throws SQLException, IOException { + if (integer == null) { + return; + } try (Context c = new Context()) { c.turnOffAuthorisationSystem(); Process process = processService.find(c, integer); diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java index 33319dde66b0..e06719175882 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java @@ -190,10 +190,6 @@ public WorkspaceItemBuilder withIssueDate(final String issueDate) { return addMetadataValue(MetadataSchemaEnum.DC.getName(), "date", "issued", new DCDate(issueDate).toString()); } - public WorkspaceItemBuilder withType(final String type) { - return addMetadataValue(MetadataSchemaEnum.DC.getName(), "type", null, type); - } - public WorkspaceItemBuilder withAuthor(final String authorName) { return addMetadataValue(MetadataSchemaEnum.DC.getName(), "contributor", "author", authorName); } @@ 
-272,6 +268,10 @@ public WorkspaceItemBuilder withAbstract(final String subject) { return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject); } + public WorkspaceItemBuilder withType(final String type) { + return addMetadataValue(MetadataSchemaEnum.DC.getName(),"type", null, type); + } + public WorkspaceItemBuilder grantLicense() { Item item = workspaceItem.getItem(); String license; diff --git a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java index 921e4efcc7d8..e85a0fc7b78d 100644 --- a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java @@ -432,6 +432,51 @@ public void testDeleteAndExpunge() throws IOException, SQLException, AuthorizeEx assertThat("testExpunge 0", bitstreamService.find(context, bitstreamId), nullValue()); } + /** + * Test of delete method, of class Bitstream. + */ + @Test + public void testDeleteBitstreamAndUnsetPrimaryBitstreamID() + throws IOException, SQLException, AuthorizeException { + + context.turnOffAuthorisationSystem(); + + Community owningCommunity = communityService.create(null, context); + Collection collection = collectionService.create(context, owningCommunity); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + Item item = installItemService.installItem(context, workspaceItem); + Bundle b = bundleService.create(context, item, "TESTBUNDLE"); + + // Allow Bundle REMOVE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE); + // Allow Bitstream WRITE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE)); + // Allow Bitstream DELETE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE)); + + //set a 
value different than default + File f = new File(testProps.get("test.bitstream").toString()); + + // Create a new bitstream, which we can delete. + Bitstream delBS = bitstreamService.create(context, new FileInputStream(f)); + bundleService.addBitstream(context, b, delBS); + // set primary bitstream + b.setPrimaryBitstreamID(delBS); + context.restoreAuthSystemState(); + + // Test that delete will flag the bitstream as deleted + assertFalse("testDeleteBitstreamAndUnsetPrimaryBitstreamID 0", delBS.isDeleted()); + assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(delBS)); + // Delete bitstream + bitstreamService.delete(context, delBS); + assertTrue("testDeleteBitstreamAndUnsetPrimaryBitstreamID 2", delBS.isDeleted()); + + // Now test if the primary bitstream was unset from bundle + assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 3", b.getPrimaryBitstream(), equalTo(null)); + } + /** * Test of retrieve method, of class Bitstream. */ diff --git a/dspace-api/src/test/java/org/dspace/content/BundleTest.java b/dspace-api/src/test/java/org/dspace/content/BundleTest.java index 4ff35f5b4df8..4af64b81cb0c 100644 --- a/dspace-api/src/test/java/org/dspace/content/BundleTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BundleTest.java @@ -513,6 +513,41 @@ public void testRemoveBitstreamAuth() throws SQLException, AuthorizeException, I } + /** + * Test removeBitstream method and also the unsetPrimaryBitstreamID method, of class Bundle. 
+ */ + @Test + public void testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID() + throws IOException, SQLException, AuthorizeException { + // Allow Item WRITE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); + // Allow Bundle ADD permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.ADD); + // Allow Bundle REMOVE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE); + // Allow Bitstream WRITE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE)); + // Allow Bitstream DELETE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE)); + + + context.turnOffAuthorisationSystem(); + //set a value different than default + File f = new File(testProps.get("test.bitstream").toString()); + Bitstream bs = bitstreamService.create(context, new FileInputStream(f)); + bundleService.addBitstream(context, b, bs); + b.setPrimaryBitstreamID(bs); + context.restoreAuthSystemState(); + + assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 0", b.getPrimaryBitstream(), equalTo(bs)); + //remove bitstream + bundleService.removeBitstream(context, b, bs); + //is -1 when not set + assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(null)); + } + /** * Test of update method, of class Bundle. 
*/ diff --git a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java index 1548ebcae0d8..13d037abf823 100644 --- a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java +++ b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java @@ -725,9 +725,6 @@ public void testRemoveItemAuth() throws Exception { // Allow Item REMOVE perms doNothing().when(authorizeServiceSpy) .authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE)); - // Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion) - doNothing().when(authorizeServiceSpy) - .authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE)); // create & add item first context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index 5e95794bb77e..a9df8f22a423 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -1187,8 +1187,6 @@ public void testDeleteAuth() throws Exception { doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true); // Allow Item DELETE perms doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE); - // Allow Item WRITE perms (required to first delete identifiers) - doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); UUID id = item.getID(); itemService.delete(context, item); @@ -1393,6 +1391,78 @@ public void testInheritCollectionDefaultPolicies() throws Exception { assertTrue("testInheritCollectionDefaultPolicies 2", equals); } + // Test to verify DEFAULT_*_READ policies on collection inherit properly to Item/Bundle/Bitstream + @Test + public void testInheritCollectionDefaultPolicies_custom_default_groups() throws Exception { + 
context.turnOffAuthorisationSystem(); + // Create a new collection + Collection c = createCollection(); + // Create a custom group with DEFAULT_ITEM_READ privileges in this Collection + Group item_read_role = collectionService.createDefaultReadGroup(context, c, "ITEM", + Constants.DEFAULT_ITEM_READ); + // Create a custom group with DEFAULT_BITSTREAM_READ privileges in this Collection + Group bitstream_read_role = collectionService.createDefaultReadGroup(context, c, "BITSTREAM", + Constants.DEFAULT_BITSTREAM_READ); + context.restoreAuthSystemState(); + + // Verify that Collection's DEFAULT_ITEM_READ now uses the newly created group. + List defaultItemReadPolicies = + authorizeService.getPoliciesActionFilter(context, c, Constants.DEFAULT_ITEM_READ); + assertEquals("One DEFAULT_ITEM_READ policy", 1, defaultItemReadPolicies.size()); + assertEquals("DEFAULT_ITEM_READ group", item_read_role.getName(), + defaultItemReadPolicies.get(0).getGroup().getName()); + + // Verify that Collection's DEFAULT_BITSTREAM_READ now uses the newly created group. 
+ List defaultBitstreamReadPolicies = + authorizeService.getPoliciesActionFilter(context, c, Constants.DEFAULT_BITSTREAM_READ); + assertEquals("One DEFAULT_BITSTREAM_READ policy on Collection", 1, defaultBitstreamReadPolicies.size()); + assertEquals("DEFAULT_BITSTREAM_READ group", bitstream_read_role.getName(), + defaultBitstreamReadPolicies.get(0).getGroup().getName()); + + context.turnOffAuthorisationSystem(); + // Create a new Item in this Collection + WorkspaceItem workspaceItem = workspaceItemService.create(context, c, false); + Item item = workspaceItem.getItem(); + // Add a single Bitstream to the ORIGINAL bundle + File f = new File(testProps.get("test.bitstream").toString()); + Bitstream bitstream = itemService.createSingleBitstream(context, new FileInputStream(f), item); + context.restoreAuthSystemState(); + + // Allow Item WRITE perms + doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE, true); + // Inherit all default policies from Collection down to new Item + itemService.inheritCollectionDefaultPolicies(context, item, c); + + // Verify Item inherits DEFAULT_ITEM_READ group from Collection + List itemReadPolicies = authorizeService.getPoliciesActionFilter(context, item, Constants.READ); + assertEquals("One READ policy on Item", 1, itemReadPolicies.size()); + assertEquals("Item's READ group", item_read_role.getName(), + itemReadPolicies.get(0).getGroup().getName()); + + // Verify Bitstream inherits DEFAULT_BITSTREAM_READ group from Collection + List bitstreamReadPolicies = authorizeService.getPoliciesActionFilter(context, bitstream, + Constants.READ); + assertEquals("One READ policy on Bitstream", 1, bitstreamReadPolicies.size()); + assertEquals("Bitstream's READ group", bitstream_read_role.getName(), + bitstreamReadPolicies.get(0).getGroup().getName()); + + // Verify ORIGINAL Bundle inherits DEFAULT_ITEM_READ group from Collection + // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the 
files + // can be listed (even if files are access restricted or embargoed) + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + Bundle originalBundle = bundles.get(0); + List bundleReadPolicies = authorizeService.getPoliciesActionFilter(context, originalBundle, + Constants.READ); + assertEquals("One READ policy on Bundle", 1, bundleReadPolicies.size()); + assertEquals("Bundles's READ group", item_read_role.getName(), + bundleReadPolicies.get(0).getGroup().getName()); + + // Cleanup after ourselves. Delete created collection & all content under it + context.turnOffAuthorisationSystem(); + collectionService.delete(context, c); + context.restoreAuthSystemState(); + } + /** * Test of move method, of class Item. */ diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java similarity index 99% rename from dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java rename to dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java index d42213da2cf8..1b6f23032d57 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java @@ -26,7 +26,7 @@ import org.junit.Before; import org.junit.Test; -public class RelationshipServiceImplVersioningTest extends AbstractIntegrationTestWithDatabase { +public class RelationshipServiceImplVersioningIT extends AbstractIntegrationTestWithDatabase { private RelationshipService relationshipService; private RelationshipDAO relationshipDAO; diff --git a/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java similarity index 99% rename from dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java 
rename to dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java index d9d3ffd2baea..fc89e4b5fb97 100644 --- a/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java +++ b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java @@ -71,7 +71,7 @@ import org.junit.Test; import org.springframework.beans.factory.config.AutowireCapableBeanFactory; -public class VersioningWithRelationshipsTest extends AbstractIntegrationTestWithDatabase { +public class VersioningWithRelationshipsIT extends AbstractIntegrationTestWithDatabase { private final RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); diff --git a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java index aeba48df80ef..ad5ca83105bf 100644 --- a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java @@ -7,9 +7,15 @@ */ package org.dspace.content.authority; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.UUID; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -18,9 +24,19 @@ import org.dspace.content.Item; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataValue; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.MetadataAuthorityService; import 
org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.discovery.MockSolrSearchCore; +import org.dspace.event.ConsumerProfile; +import org.dspace.event.Dispatcher; +import org.dspace.event.factory.EventServiceFactory; +import org.dspace.event.service.EventService; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -29,196 +45,346 @@ public class ReciprocalItemAuthorityConsumerIT extends AbstractIntegrationTestWi private final ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private MockSolrSearchCore searchService; + + private ConfigurationService configurationService; + + private MetadataAuthorityService metadataAuthorityService; + + private EventService eventService; + @Override @Before public void setUp() throws Exception { super.setUp(); context.turnOffAuthorisationSystem(); + ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + searchService = serviceManager.getServiceByName(null, MockSolrSearchCore.class); + configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); + metadataAuthorityService = ContentAuthorityServiceFactory.getInstance() + .getMetadataAuthorityService(); + eventService = EventServiceFactory.getInstance().getEventService(); + + configurationService.setProperty("ItemAuthority.reciprocalMetadata.Publication.dc.relation.product", + "dc.relation.publication"); + configurationService.setProperty("ItemAuthority.reciprocalMetadata.Product.dc.relation.publication", + "dc.relation.product"); + metadataAuthorityService.clearCache(); + + initializeReciprocalConfiguration(); + parentCommunity = CommunityBuilder.createCommunity(context) .withName("Parent Community") .build(); } @Test - public 
void testShouldCreatePublicationMetadataForProductItem() { - String productTitle = "productTitle"; - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, productTitle) - .withType("product") - .build(); + public void testShouldCreatePublicationMetadataForProductItem() throws Exception { + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + String productTitle = "productTitle"; + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, productTitle) + .withType("product") + .build(); - Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", - "product", null, productTitle, 
productItem.getID().toString(), Choices.CF_ACCEPTED) - .withType("publication") - .build(); + Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", + "product", null, productTitle, productItem.getID().toString(), Choices.CF_ACCEPTED) + .withType("publication") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + productItem, "dc.relation.publication"); + + Assert.assertEquals(1, metadataValues.size()); + Assert.assertNotNull(metadataValues.get(0)); + Assert.assertEquals(publicationItem.getID().toString(), metadataValues.get(0).getAuthority()); + Assert.assertEquals(publicationItem.getName(), metadataValues.get(0).getValue()); - List metadataValues = itemService.getMetadataByMetadataString( - productItem, "dc.relation.publication"); + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); - Assert.assertEquals(1, metadataValues.size()); - Assert.assertNotNull(metadataValues.get(0)); - Assert.assertEquals(publicationItem.getID().toString(), metadataValues.get(0).getAuthority()); - Assert.assertEquals(publicationItem.getName(), metadataValues.get(0).getValue()); + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertEquals(1, publicationTitles.size()); + Assert.assertEquals(publicationItem.getName(), publicationTitles.get(0)); + + List publicationAuthorities = (List) 
solrDoc.get("dc.relation.publication_authority"); + Assert.assertEquals(1, publicationAuthorities.size()); + Assert.assertEquals(publicationItem.getID().toString(), publicationAuthorities.get(0)); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test - public void testShouldCreateProductMetadataForPublicationItem() { - String publicationTitle = "publicationTitle"; - Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) - .withType("publication") - .build(); + public void testShouldCreateProductMetadataForPublicationItem() throws Exception { + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + String publicationTitle = "publicationTitle"; + Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) + .withType("publication") + .build(); - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item 
productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", - null, publicationTitle, publicationItem.getID().toString(), Choices.CF_ACCEPTED) - .withType("product") - .build(); + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", + null, publicationTitle, publicationItem.getID().toString(), Choices.CF_ACCEPTED) + .withType("product") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + publicationItem, "dc.relation.product"); + + Assert.assertEquals(1, metadataValues.size()); + Assert.assertNotNull(metadataValues.get(0)); + Assert.assertEquals(productItem.getID().toString(), metadataValues.get(0).getAuthority()); + Assert.assertEquals(productItem.getName(), metadataValues.get(0).getValue()); - List metadataValues = itemService.getMetadataByMetadataString( - publicationItem, "dc.relation.product"); + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); - Assert.assertEquals(1, metadataValues.size()); - Assert.assertNotNull(metadataValues.get(0)); - Assert.assertEquals(productItem.getID().toString(), 
metadataValues.get(0).getAuthority()); - Assert.assertEquals(productItem.getName(), metadataValues.get(0).getValue()); + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertEquals(1, productTitles.size()); + Assert.assertEquals(productItem.getName(), productTitles.get(0)); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertEquals(1, productAuthorities.size()); + Assert.assertEquals(productItem.getID().toString(), productAuthorities.get(0)); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemMentioningNotExistingAuthorityIsCreated() throws Exception { - UUID notExistingItemId = UUID.fromString("803762b5-6f73-4870-b941-adf3c5626f04"); - Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") - .withType("publication") - .build(); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, 
"productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", - null, "notExistingPublicationTitle", notExistingItemId.toString(), Choices.CF_ACCEPTED) - .withType("product") - .build(); + UUID notExistingItemId = UUID.fromString("803762b5-6f73-4870-b941-adf3c5626f04"); + Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") + .withType("publication") + .build(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", + null, "notExistingPublicationTitle", notExistingItemId.toString(), Choices.CF_ACCEPTED) + .withType("product") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + publicationItem, "dc.relation.product"); + Assert.assertEquals(0, metadataValues.size()); - List metadataValues = itemService.getMetadataByMetadataString( - publicationItem, "dc.relation.product"); - Assert.assertEquals(0, metadataValues.size()); + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = 
solrDocumentList.get(0); - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertNull(productTitles); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertNull(productAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemMentioningInvalidAuthorityIsCreated() throws Exception { - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", - null, "notExistingPublicationTitle", "invalidAuthorityUUID", Choices.CF_ACCEPTED) - .withType("product") - .build(); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + 
.withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", + null, "notExistingPublicationTitle", "invalidAuthorityUUID", Choices.CF_ACCEPTED) + .withType("product") + .build(); + + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertNull(publicationTitles); - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); + Assert.assertNull(publicationAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemWithoutAuthorityIsCreated() throws Exception { - String publicationTitle = "publicationTitle"; - Collection publicatoinItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicatoinItem = ItemBuilder.createItem(context, publicatoinItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) - .withType("publication") - .build(); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", 
"true"); + metadataAuthorityService.clearCache(); + String publicationTitle = "publicationTitle"; + Collection publicatoinItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicatoinItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) + .withType("publication") + .build(); - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", publicationTitle) - .withType("product") - .build(); + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", publicationTitle) + .withType("product") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + publicationItem, "dc.relation.product"); + Assert.assertEquals(0, 
metadataValues.size()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); - List metadataValues = itemService.getMetadataByMetadataString( - publicatoinItem, "dc.relation.product"); - Assert.assertEquals(0, metadataValues.size()); + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertNull(productTitles); - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertNull(productAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemWithoutPublicationMetadataIsCreated() throws Exception { - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withType("product") - .build(); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item 
productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withType("product") + .build(); + + List productItemMetadataValues = itemService.getMetadataByMetadataString( + productItem, "dc.relation.publication"); + Assert.assertEquals(0, productItemMetadataValues.size()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); - List productItemMetadataValues = itemService.getMetadataByMetadataString( - productItem, "dc.relation.publication"); - Assert.assertEquals(0, productItemMetadataValues.size()); + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertNull(publicationTitles); - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); + Assert.assertNull(publicationAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } + } + + private SolrDocumentList getSolrDocumentList(Item item) throws Exception { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setQuery("search.resourceid:" + item.getID()); + QueryResponse queryResponse = searchService.getSolr().query(solrQuery); + return queryResponse.getResults(); } + private void initializeReciprocalConfiguration() throws Exception { + 
Dispatcher dispatcher = eventService.getDispatcher("default"); + Object object = dispatcher.getConsumers(); + if (object instanceof Map) { + Map consumers = (LinkedHashMap) dispatcher.getConsumers(); + + ConsumerProfile consumerProfile = consumers.get("reciprocal"); + consumerProfile.getConsumer().initialize(); + } + } } \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java similarity index 98% rename from dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java rename to dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java index 2e5cb01dd2ca..acb182f5bc88 100644 --- a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java +++ b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java @@ -41,9 +41,9 @@ * Date: 20 Sep 2019 */ @Ignore -public class RelationshipDAOImplTest extends AbstractIntegrationTest { +public class RelationshipDAOImplIT extends AbstractIntegrationTest { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplTest.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplIT.class); private Relationship relationship; diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java similarity index 98% rename from dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java rename to dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java index f7b8748748c8..67908e9890fd 100644 --- a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java +++ b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java @@ -36,9 +36,9 @@ import 
org.junit.Ignore; import org.junit.Test; @Ignore -public class RelationshipTypeDAOImplTest extends AbstractIntegrationTest { +public class RelationshipTypeDAOImplIT extends AbstractIntegrationTest { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplTest.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplIT.class); private Relationship relationship; diff --git a/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java b/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java index 5e95c28f65b7..176f055a4468 100644 --- a/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java @@ -10,6 +10,7 @@ import static org.dspace.app.matcher.MetadataValueMatcher.with; import static org.dspace.core.CrisConstants.PLACEHOLDER_PARENT_METADATA_VALUE; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -79,9 +80,12 @@ public void testSingleMetadataValueEnhancement() throws Exception { publication = commitAndReload(publication); List metadataValues = publication.getMetadata(); - assertThat(metadataValues, hasSize(9)); + assertThat(metadataValues, hasSize(11)); assertThat(metadataValues, hasItem(with("cris.virtual.department", "4Science"))); assertThat(metadataValues, hasItem(with("cris.virtualsource.department", personId))); + assertThat(metadataValues, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(metadataValues, hasItem(with("cris.virtualsource.author-orcid", personId))); + MetadataValue virtualField = getFirstMetadataValue(publication, 
"cris.virtual.department"); MetadataValue virtualSourceField = getFirstMetadataValue(publication, "cris.virtualsource.department"); @@ -93,10 +97,12 @@ public void testSingleMetadataValueEnhancement() throws Exception { publication = commitAndReload(publication); metadataValues = publication.getMetadata(); - assertThat(metadataValues, hasSize(10)); + assertThat(metadataValues, hasSize(12)); assertThat(metadataValues, hasItem(with("dc.contributor.author", "Walter White", personId, 600))); assertThat(metadataValues, hasItem(with("cris.virtual.department", "4Science"))); assertThat(metadataValues, hasItem(with("cris.virtualsource.department", personId))); + assertThat(metadataValues, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(metadataValues, hasItem(with("cris.virtualsource.author-orcid", personId))); assertThat(virtualField, equalTo(getFirstMetadataValue(publication, "cris.virtual.department"))); assertThat(virtualSourceField, equalTo(getFirstMetadataValue(publication, "cris.virtualsource.department"))); @@ -135,7 +141,7 @@ public void testManyMetadataValuesEnhancement() throws Exception { publication = commitAndReload(publication); List values = publication.getMetadata(); - assertThat(values, hasSize(18)); + assertThat(values, hasSize(26)); assertThat(values, hasItem(with("dc.contributor.author", "Red Smith"))); assertThat(values, hasItem(with("dc.contributor.author", "Walter White", person1.getID().toString(), 1, 600))); assertThat(values, hasItem(with("dc.contributor.author", "John Smith", person2.getID().toString(), 2, 600))); @@ -148,9 +154,18 @@ public void testManyMetadataValuesEnhancement() throws Exception { assertThat(values, hasItem(with("cris.virtualsource.department", person2.getID().toString(), 2))); assertThat(values, hasItem(with("cris.virtual.department", "University of Rome", 3))); assertThat(values, hasItem(with("cris.virtualsource.department", person3.getID().toString(), 3))); - + 
assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 0))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 0))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person1.getID().toString(), 1))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 2))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person2.getID().toString(), 2))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 3))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person3.getID().toString(), 3))); assertThat(getMetadataValues(publication, "cris.virtual.department"), hasSize(4)); assertThat(getMetadataValues(publication, "cris.virtualsource.department"), hasSize(4)); + assertThat(getMetadataValues(publication, "cris.virtual.author-orcid"), hasSize(4)); + assertThat(getMetadataValues(publication, "cris.virtualsource.author-orcid"), hasSize(4)); } @@ -188,7 +203,7 @@ public void testEnhancementAfterMetadataAddition() throws Exception { publication = commitAndReload(publication); metadataValues = publication.getMetadata(); - assertThat(metadataValues, hasSize(9)); + assertThat(metadataValues, hasSize(11)); assertThat(metadataValues, hasItem(with("dc.contributor.author", "Walter White", personId, 600))); assertThat(metadataValues, hasItem(with("cris.virtual.department", "4Science"))); assertThat(metadataValues, hasItem(with("cris.virtualsource.department", personId))); @@ -227,7 +242,7 @@ public void testEnhancementWithMetadataRemoval() throws Exception { publication = commitAndReload(publication); List values = publication.getMetadata(); - assertThat(values, hasSize(15)); + assertThat(values, hasSize(21)); assertThat(values, 
hasItem(with("dc.contributor.author", "Walter White", person1.getID().toString(), 0, 600))); assertThat(values, hasItem(with("dc.contributor.author", "John Smith", person2.getID().toString(), 1, 600))); assertThat(values, hasItem(with("dc.contributor.author", "Jesse Pinkman", person3.getID().toString(), 2, 600))); @@ -237,6 +252,12 @@ public void testEnhancementWithMetadataRemoval() throws Exception { assertThat(values, hasItem(with("cris.virtualsource.department", person2.getID().toString(), 1))); assertThat(values, hasItem(with("cris.virtual.department", "University of Rome", 2))); assertThat(values, hasItem(with("cris.virtualsource.department", person3.getID().toString(), 2))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person1.getID().toString()))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person2.getID().toString(), 1))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person3.getID().toString(), 2))); assertThat(getMetadataValues(publication, "cris.virtual.department"), hasSize(3)); assertThat(getMetadataValues(publication, "cris.virtualsource.department"), hasSize(3)); @@ -250,16 +271,21 @@ public void testEnhancementWithMetadataRemoval() throws Exception { publication = commitAndReload(publication); values = publication.getMetadata(); - assertThat(values, hasSize(12)); + assertThat(values, hasSize(16)); assertThat(values, hasItem(with("dc.contributor.author", "Walter White", person1.getID().toString(), 0, 600))); assertThat(values, hasItem(with("dc.contributor.author", "Jesse Pinkman", person3.getID().toString(), 1, 600))); assertThat(values, hasItem(with("cris.virtual.department", 
"4Science"))); assertThat(values, hasItem(with("cris.virtualsource.department", person1.getID().toString()))); assertThat(values, hasItem(with("cris.virtual.department", "University of Rome", 1))); assertThat(values, hasItem(with("cris.virtualsource.department", person3.getID().toString(), 1))); - + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 0))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person1.getID().toString(), 0))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person3.getID().toString(), 1))); assertThat(getMetadataValues(publication, "cris.virtual.department"), hasSize(2)); assertThat(getMetadataValues(publication, "cris.virtualsource.department"), hasSize(2)); + assertThat(getMetadataValues(publication, "cris.virtual.author-orcid"), hasSize(2)); + assertThat(getMetadataValues(publication, "cris.virtualsource.author-orcid"), hasSize(2)); } @@ -290,6 +316,75 @@ public void testWithWorkspaceItem() throws Exception { } + @Test + @SuppressWarnings("unchecked") + public void testEnhancementAfterItemUpdate() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item person = ItemBuilder.createItem(context, collection) + .withTitle("Walter White") + .withOrcidIdentifier("0000-0000-1111-2222") + .build(); + + String personId = person.getID().toString(); + + Item publication = ItemBuilder.createItem(context, collection) + .withTitle("Test publication") + .withEntityType("Publication") + .withAuthor("Jesse Pinkman") + .withAuthor("Saul Goodman") + .withAuthor("Walter White", person.getID().toString()) + .withAuthor("Gus Fring") + .build(); + + context.restoreAuthSystemState(); + publication = commitAndReload(publication); + + assertThat(getMetadataValues(publication, "dc.contributor.author"), contains( + with("dc.contributor.author", "Jesse Pinkman"), + 
with("dc.contributor.author", "Saul Goodman", 1), + with("dc.contributor.author", "Walter White", personId, 2, 600), + with("dc.contributor.author", "Gus Fring", 3))); + + assertThat(getMetadataValues(publication, "cris.virtual.author-orcid"), contains( + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1), + with("cris.virtual.author-orcid", "0000-0000-1111-2222", 2), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 3))); + + assertThat(getMetadataValues(publication, "cris.virtualsource.author-orcid"), contains( + with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1), + with("cris.virtualsource.author-orcid", personId, 2), + with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 3))); + + context.turnOffAuthorisationSystem(); + itemService.addMetadata(context, publication, "dc", "title", "alternative", null, "Other name"); + itemService.update(context, publication); + context.restoreAuthSystemState(); + publication = commitAndReload(publication); + + assertThat(getMetadataValues(publication, "dc.contributor.author"), contains( + with("dc.contributor.author", "Jesse Pinkman"), + with("dc.contributor.author", "Saul Goodman", 1), + with("dc.contributor.author", "Walter White", personId, 2, 600), + with("dc.contributor.author", "Gus Fring", 3))); + + assertThat(getMetadataValues(publication, "cris.virtual.author-orcid"), contains( + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1), + with("cris.virtual.author-orcid", "0000-0000-1111-2222", 2), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 3))); + + assertThat(getMetadataValues(publication, "cris.virtualsource.author-orcid"), contains( + 
with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1), + with("cris.virtualsource.author-orcid", personId, 2), + with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 3))); + + } + private MetadataValue getFirstMetadataValue(Item item, String metadataField) { return getMetadataValues(item, metadataField).get(0); } diff --git a/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java b/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java index 1bf48c373177..33913368b0a1 100644 --- a/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java +++ b/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java @@ -9,6 +9,7 @@ import static org.dspace.app.matcher.MetadataValueMatcher.with; import static org.dspace.content.Item.ANY; +import static org.dspace.content.enhancer.consumer.ItemEnhancerConsumer.ITEMENHANCER_ENABLED; import static org.dspace.core.CrisConstants.PLACEHOLDER_PARENT_METADATA_VALUE; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; @@ -48,12 +49,16 @@ public class ItemEnhancerScriptIT extends AbstractIntegrationTestWithDatabase { - private static String[] consumers; + private static ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final EventService eventService = EventServiceFactory.getInstance().getEventService(); + private static boolean isEnabled; + private static String[] consumers; private ItemService itemService; private Collection collection; + /** * This method will be run before the first test as per @BeforeClass. 
It will * configure the event.dispatcher.default.consumers property to remove the @@ -61,13 +66,13 @@ public class ItemEnhancerScriptIT extends AbstractIntegrationTestWithDatabase { */ @BeforeClass public static void initConsumers() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); consumers = configService.getArrayProperty("event.dispatcher.default.consumers"); Set consumersSet = new HashSet(Arrays.asList(consumers)); - consumersSet.remove("itemenhancer"); - configService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); - EventService eventService = EventServiceFactory.getInstance().getEventService(); - eventService.reloadConfiguration(); + if (!consumersSet.contains("itemenhancer")) { + consumersSet.add("itemenhancer"); + configService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); + eventService.reloadConfiguration(); + } } /** @@ -75,18 +80,19 @@ public static void initConsumers() { */ @AfterClass public static void resetDefaultConsumers() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); configService.setProperty("event.dispatcher.default.consumers", consumers); - EventService eventService = EventServiceFactory.getInstance().getEventService(); eventService.reloadConfiguration(); } @Before public void setup() { + configService.setProperty(ITEMENHANCER_ENABLED, false); + itemService = ContentServiceFactory.getInstance().getItemService(); context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) .withName("Parent Community") .build(); diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java index b4ecc73a0c46..5cb9e9dc6b8f 100644 --- a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java +++ 
b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java @@ -34,6 +34,7 @@ import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; +import java.util.Locale; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -59,6 +60,11 @@ import org.dspace.content.MetadataField; import org.dspace.content.MetadataFieldServiceImpl; import org.dspace.content.RelationshipType; +import org.dspace.content.authority.Choices; +import org.dspace.content.authority.DCInputAuthority; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; import org.dspace.content.integration.crosswalks.virtualfields.VirtualField; import org.dspace.content.integration.crosswalks.virtualfields.VirtualFieldMapper; @@ -69,6 +75,8 @@ import org.dspace.eperson.EPerson; import org.dspace.layout.CrisLayoutBox; import org.dspace.layout.LayoutSecurity; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.utils.DSpace; import org.json.JSONObject; import org.junit.After; @@ -82,6 +90,7 @@ * */ public class ReferCrosswalkIT extends AbstractIntegrationTestWithDatabase { + static final String CFG_PREFIX = "identifier.doi.prefix"; private static final String BASE_OUTPUT_DIR_PATH = "./target/testing/dspace/assetstore/crosswalk/"; @@ -99,6 +108,12 @@ public class ReferCrosswalkIT extends AbstractIntegrationTestWithDatabase { private VirtualField virtualFieldId; + private ConfigurationService configurationService; + + private MetadataAuthorityService metadataAuthorityService; + + private ChoiceAuthorityService choiceAuthorityService; + @Before public void setup() throws SQLException, AuthorizeException { @@ -111,6 +126,10 @@ public void 
setup() throws SQLException, AuthorizeException { this.itemService = new DSpace().getSingletonService(ItemServiceImpl.class); this.mfss = new DSpace().getSingletonService(MetadataFieldServiceImpl.class); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.metadataAuthorityService = ContentAuthorityServiceFactory.getInstance().getMetadataAuthorityService(); + this.choiceAuthorityService = ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService(); + this.virtualFieldId = this.virtualFieldMapper.getVirtualField("id"); VirtualField mockedVirtualFieldId = mock(VirtualField.class); @@ -2530,6 +2549,302 @@ public void testVirtualBitstreamFieldWithProject() throws Exception { assertThat(resultLines[54].trim(), equalTo("")); } + @Test + public void testExportToDataciteFormatItemWithThreeDOI() throws Exception { + String prefix; + prefix = this.configurationService.getProperty(CFG_PREFIX); + if (null == prefix) { + throw new RuntimeException("Unable to load DOI prefix from " + + "configuration. 
Cannot find property " + + CFG_PREFIX + "."); + } + + context.turnOffAuthorisationSystem(); + + Item publication = createItem(context, collection) + .withEntityType("Publication") + .withTitle("publication title") + .withDoiIdentifier("test doi") + .withDoiIdentifier("test doi2") + .withDoiIdentifier("test" + prefix + "test") + .build(); + + context.restoreAuthSystemState(); + + ReferCrosswalk referCrosswalk = new DSpace().getServiceManager() + .getServiceByName("referCrosswalkVirtualFieldDOI", ReferCrosswalk.class); + assertThat(referCrosswalk, notNullValue()); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publication, out); + + String[] resultLines = out.toString().split("\n"); + + assertThat(resultLines.length, is(5)); + assertThat(resultLines[0].trim(), is("{")); + assertThat(resultLines[1].trim(), is("\"primary-doi\": \"test" + prefix + "test\",")); + assertThat(resultLines[2].trim(), is("\"alternative-doi\": \"test doi\",")); + assertThat(resultLines[3].trim(), is("\"alternative-doi\": \"test doi2\"")); + assertThat(resultLines[4].trim(), is("}")); + } + + @Test + public void testExportToDataciteFormatItemWithSingleDOINotMatchingPrefix() throws Exception { + String prefix; + prefix = this.configurationService.getProperty(CFG_PREFIX); + if (null == prefix) { + throw new RuntimeException("Unable to load DOI prefix from " + + "configuration. 
Cannot find property " + + CFG_PREFIX + "."); + } + + context.turnOffAuthorisationSystem(); + + Item publication = createItem(context, collection) + .withEntityType("Publication") + .withTitle("publication title") + .withDoiIdentifier("test doi") + .build(); + + context.restoreAuthSystemState(); + + ReferCrosswalk referCrosswalk = new DSpace().getServiceManager() + .getServiceByName("referCrosswalkVirtualFieldDOI", ReferCrosswalk.class); + assertThat(referCrosswalk, notNullValue()); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publication, out); + + String[] resultLines = out.toString().split("\n"); + + assertThat(resultLines.length, is(3)); + assertThat(resultLines[0].trim(), is("{")); + assertThat(resultLines[1].trim(), is("\"primary-doi\": \"test doi\"")); + assertThat(resultLines[2].trim(), is("}")); + } + + + + @Test + public void testPublicationVirtualFieldWithVocabularyValuePairList() throws Exception { + + Locale defaultLocale = context.getCurrentLocale(); + String[] defaultLocales = this.configurationService.getArrayProperty("webui.supported.locales"); + + try { + + Locale ukranian = new Locale("uk"); + + context.turnOffAuthorisationSystem(); + // reset supported locales + this.configurationService.setProperty( + "webui.supported.locales", + new String[] {Locale.ENGLISH.getLanguage(), Locale.ITALIAN.getLanguage(), ukranian.getLanguage()} + ); + this.metadataAuthorityService.clearCache(); + this.choiceAuthorityService.clearCache(); + // reload plugin + DCInputAuthority.reset(); + DCInputAuthority.getPluginNames(); + // set italian locale + context.setCurrentLocale(Locale.ITALIAN); + + String vocabularyName = "publication-coar-types"; + Collection publicationCollection = + createCollection(context, community) + .withEntityType("Publication") + .withSubmissionDefinition("publication") + .withAdminGroup(eperson) + .build(); + + Item publicationItem = createItem(context, publicationCollection) + 
.withEntityType("Publication") + .withTitle("Publication title") + .withType("not translated", vocabularyName + ":c_7bab") + .withLanguage("en_US") + .build(); + + context.restoreAuthSystemState(); + + ReferCrosswalk referCrosswalk = + new DSpace().getServiceManager() + .getServiceByName( + "referCrosswalkPublicationVirtualVocabularyI18nFieldWithVocabulary", ReferCrosswalk.class + ); + assertThat(referCrosswalk, notNullValue()); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publicationItem, out); + + String[] resultLines = out.toString().split("\n"); + assertThat(resultLines.length, is(7)); + assertThat(resultLines[0].trim(), equalTo("")); + assertThat(resultLines[4].trim(), equalTo("articolo sul software")); + assertThat(resultLines[5].trim(), equalTo("Inglese (USA)")); + assertThat(resultLines[6].trim(), equalTo("")); + + context.setCurrentLocale(ukranian); + out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publicationItem, out); + + resultLines = out.toString().split("\n"); + assertThat(resultLines.length, is(7)); + assertThat(resultLines[0].trim(), equalTo("")); + assertThat(resultLines[4].trim(), equalTo("програмна стаття")); + assertThat(resultLines[5].trim(), equalTo("Американська (USA)")); + assertThat(resultLines[6].trim(), equalTo("")); + + } finally { + context.setCurrentLocale(defaultLocale); + this.configurationService.setProperty("webui.supported.locales",defaultLocales); + } + } + + @Test + public void testPublicationVirtualFieldValuePairList() throws Exception { + + context.turnOffAuthorisationSystem(); + String vocabularyName = "publication-coar-types"; + Collection publicationCollection = + createCollection(context, community) + .withEntityType("Publication") + .withSubmissionDefinition("publication") + .withAdminGroup(eperson) + .build(); + + Item publicationItem = createItem(context, publicationCollection) + .withTitle("Publication title") + .withType("not translated", 
vocabularyName + ":c_7bab") + .withLanguage("en_US") + .build(); + + context.restoreAuthSystemState(); + + ReferCrosswalk referCrosswalk = + new DSpace().getServiceManager() + .getServiceByName("referCrosswalkPublicationVirtualVocabularyI18nField", ReferCrosswalk.class); + assertThat(referCrosswalk, notNullValue()); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publicationItem, out); + + String[] resultLines = out.toString().split("\n"); + assertThat(resultLines.length, is(6)); + assertThat(resultLines[0].trim(), equalTo("")); + assertThat(resultLines[3].trim(), equalTo("software paper")); + assertThat(resultLines[4].trim(), equalTo("English (United States)")); + assertThat(resultLines[5].trim(), equalTo("")); + } + + @Test + public void testPublicationMultilanguageVirtualFieldValuePairList() throws Exception { + + Locale defaultLocale = context.getCurrentLocale(); + String[] defaultLocales = this.configurationService.getArrayProperty("webui.supported.locales"); + try { + + Locale ukranian = new Locale("uk"); + + context.turnOffAuthorisationSystem(); + // reset supported locales + this.configurationService.setProperty( + "webui.supported.locales", + new String[] {Locale.ENGLISH.getLanguage(), Locale.ITALIAN.getLanguage(), ukranian.getLanguage()} + ); + this.metadataAuthorityService.clearCache(); + this.choiceAuthorityService.clearCache(); + // reload plugin + DCInputAuthority.reset(); + DCInputAuthority.getPluginNames(); + // set italian locale + context.setCurrentLocale(Locale.ITALIAN); + + String subjectVocabularyName = "publication-coar-types"; + Collection publicationCollection = + createCollection(context, community) + .withEntityType("Publication") + .withSubmissionDefinition("languagetestprocess") + .withAdminGroup(eperson) + .build(); + + Item publicationItem = createItem(context, publicationCollection) + .withTitle("Publication title") + .withType("not translated", subjectVocabularyName + ":c_7bab") + 
.withLanguage("en_US") + .build(); + + this.itemService.addMetadata( + context, publicationItem, + "organization", "address", "addressCountry", + Item.ANY, "IT", null, Choices.CF_UNSET, 0 + ); + + context.restoreAuthSystemState(); + + ReferCrosswalk referCrosswalk = + new DSpace().getServiceManager() + .getServiceByName( + "referCrosswalkPublicationVirtualVocabularyI18nFieldWithVocabulary", ReferCrosswalk.class + ); + assertThat(referCrosswalk, notNullValue()); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publicationItem, out); + + String[] resultLines = out.toString().split("\n"); + assertThat(resultLines.length, is(7)); + assertThat(resultLines[0].trim(), equalTo("")); + assertThat(resultLines[3].trim(), equalTo("articolo sul software")); + assertThat(resultLines[4].trim(), equalTo("Inglese (USA)")); + assertThat(resultLines[5].trim(), equalTo("Italia")); + assertThat(resultLines[6].trim(), equalTo("")); + + context.turnOffAuthorisationSystem(); + // set uk locale + context.setCurrentLocale(ukranian); + context.restoreAuthSystemState(); + + out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publicationItem, out); + + resultLines = out.toString().split("\n"); + assertThat(resultLines.length, is(7)); + assertThat(resultLines[0].trim(), equalTo("")); + assertThat(resultLines[3].trim(), equalTo("програмна стаття")); + assertThat(resultLines[4].trim(), equalTo("Американська (USA)")); + // take value from submission_forms (_uk doesn't have the value-pair) + assertThat(resultLines[5].trim(), equalTo("Italia")); + assertThat(resultLines[6].trim(), equalTo("")); + + context.turnOffAuthorisationSystem(); + // set uknown locale + context.setCurrentLocale(new Locale("ru")); + context.restoreAuthSystemState(); + + out = new ByteArrayOutputStream(); + referCrosswalk.disseminate(context, publicationItem, out); + + // it uses the default locale (en) + resultLines = out.toString().split("\n"); + 
assertThat(resultLines.length, is(7)); + // takes the value from default (_ru doesn't exist) + assertThat(resultLines[0].trim(), equalTo("")); + assertThat(resultLines[3].trim(), equalTo("software paper")); + assertThat( + resultLines[4].trim(), equalTo("English (United States)") + ); + // takes the value from submission_forms (_ru doesn't exist) + assertThat(resultLines[5].trim(), equalTo("Italia")); + assertThat(resultLines[6].trim(), equalTo("")); + + } finally { + context.setCurrentLocale(defaultLocale); + configurationService.setProperty("webui.supported.locales", defaultLocales); + DCInputAuthority.reset(); + DCInputAuthority.getPluginNames(); + } + } + private void createSelectedRelationship(Item author, Item publication, RelationshipType selectedRelationshipType) { createRelationshipBuilder(context, publication, author, selectedRelationshipType, -1, -1).build(); diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java index 8929e4d65116..6ed7d8ba3aa7 100644 --- a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java +++ b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java @@ -154,7 +154,7 @@ public void testBulkImportOfCollectionDisseminate() throws Exception { } String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), - "-f", tempWorkbookFile.getAbsolutePath() }; + "-f", tempWorkbookFile.getAbsolutePath(), "-e", admin.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java new file mode 100644 index 000000000000..e824fef5a9b1 --- /dev/null +++ 
b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java @@ -0,0 +1,256 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import static org.dspace.builder.CollectionBuilder.createCollection; +import static org.dspace.builder.CommunityBuilder.createCommunity; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; + +import org.apache.commons.collections.IteratorUtils; +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.utils.DSpace; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +public class ZipItemExportCrosswalkIT extends AbstractIntegrationTestWithDatabase { + + private ZipItemExportCrosswalk 
zipItemExportCrosswalk; + + private Community community; + + private Collection collection; + + @Before + public void setup() throws SQLException, AuthorizeException { + + zipItemExportCrosswalk = new DSpace().getServiceManager() + .getServicesByType(ZipItemExportCrosswalk.class).get(0); + + context.turnOffAuthorisationSystem(); + community = createCommunity(context).build(); + collection = createCollection(context, community).build(); + context.restoreAuthSystemState(); + + } + + @Test + public void testItemsExportWithAdmin() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + Bitstream bitstream1 = createBitstream(item1, "test.txt", "This is a test"); + Bitstream bitstream2 = createBitstream(item3, "test.pdf", "Last test", "6 months"); + + String expectedEmbargo = LocalDate.now().plus(6, ChronoUnit.MONTHS).format(DateTimeFormatter.ISO_DATE); + + context.restoreAuthSystemState(); + + context.setCurrentUser(admin); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos); + } + + try (ZipFile zipFile = new ZipFile(tempZip)) { + + ZipEntry zipEntry = zipFile.getEntry(item1.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + String metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2022-01-01")); + assertThat(metsContent, + containsString("Test Item 1")); + assertThat(metsContent, containsString("Luca Giamminonni")); + assertThat(metsContent, + containsString("test@email.com")); + assertThat(metsContent, + containsString("test.txt")); + + zipEntry = 
zipFile.getEntry(item1.getID().toString() + "/bitstream_" + bitstream1.getID().toString()); + assertThat(zipEntry, notNullValue()); + assertThat(getZipEntryContent(zipFile, zipEntry), is("This is a test")); + + zipEntry = zipFile.getEntry(item2.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2022-03-01")); + assertThat(metsContent, + containsString("Test Item 2")); + assertThat(metsContent, containsString("Walter White")); + assertThat(metsContent, + containsString("test@email.com")); + + zipEntry = zipFile.getEntry(item3.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2020-01-01")); + assertThat(metsContent, + containsString("Test Item 3")); + assertThat(metsContent, containsString("Andrea Bollini")); + assertThat(metsContent, + containsString("test@email.com")); + assertThat(metsContent, containsString("")); + assertThat(metsContent, + containsString("test.pdf")); + + zipEntry = zipFile.getEntry(item3.getID().toString() + "/bitstream_" + bitstream2.getID().toString()); + assertThat(zipEntry, notNullValue()); + assertThat(getZipEntryContent(zipFile, zipEntry), is("Last test")); + + assertThat(getAllEntries(zipFile), hasSize(5)); + + } + + } + + @Test + public void testItemsExportWithCurators() throws Exception { + + context.turnOffAuthorisationSystem(); + + Group curators = GroupBuilder.createGroup(context) + .withName("Curators") + .build(); + + EPerson user = EPersonBuilder.createEPerson(context) + .withEmail("user@test.com") + .withGroupMembership(curators) + .build(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + 
context.restoreAuthSystemState(); + + context.setCurrentUser(user); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos); + } + + try (ZipFile zipFile = new ZipFile(tempZip)) { + assertThat(getAllEntries(zipFile), hasSize(3)); + } + + } + + @Test + public void testItemsExportWithNotAuthorizedUser() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + context.restoreAuthSystemState(); + + context.setCurrentUser(eperson); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + + AuthorizeException authorizeException = Assert.assertThrows(AuthorizeException.class, + () -> zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos)); + + assertThat(authorizeException.getMessage(), + is("The current user is not allowed to perform a zip item export")); + } + + } + + private Item createItem(String title, String issueDate, String author) { + return ItemBuilder.createItem(context, collection) + .withTitle(title) + .withIssueDate(issueDate) + .withAuthor(author) + .build(); + } + + private Bitstream createBitstream(Item item, String name, String content) throws Exception { + return BitstreamBuilder.createBitstream(context, item, getInputStream(content)) + .withName(name) + .build(); + } + + private Bitstream createBitstream(Item item, String name, String content, String embargoPeriod) throws Exception { + return BitstreamBuilder.createBitstream(context, item, getInputStream(content)) + .withName(name) + .withEmbargoPeriod(embargoPeriod) + 
.build(); + } + + private String getZipEntryContent(ZipFile zipFile, ZipEntry zipEntry) throws IOException { + return IOUtils.toString(zipFile.getInputStream(zipEntry), StandardCharsets.UTF_8); + } + + private InputStream getInputStream(String str) { + return IOUtils.toInputStream(str, StandardCharsets.UTF_8); + } + + @SuppressWarnings("unchecked") + private List getAllEntries(ZipFile zipFile) { + return IteratorUtils.toList(zipFile.entries().asIterator()); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/content/security/CrisSecurityServiceIT.java b/dspace-api/src/test/java/org/dspace/content/security/CrisSecurityServiceIT.java index 854ab0fa300f..2fc14dbf0346 100644 --- a/dspace-api/src/test/java/org/dspace/content/security/CrisSecurityServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/security/CrisSecurityServiceIT.java @@ -24,7 +24,10 @@ import org.dspace.builder.ItemBuilder; import org.dspace.content.Collection; import org.dspace.content.Item; +import org.dspace.content.logic.Filter; +import org.dspace.content.logic.LogicalStatementException; import org.dspace.content.security.service.CrisSecurityService; +import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.utils.DSpace; @@ -368,6 +371,92 @@ public void testHasAccessWithGroupConfig() throws SQLException, AuthorizeExcepti assertThat(crisSecurityService.hasAccess(context, item, fourthUser, accessMode), is(true)); } + @Test + public void testHasAccessWithGroupConfigAndAdditionalFilter() throws SQLException, AuthorizeException { + + context.turnOffAuthorisationSystem(); + + Group firstGroup = GroupBuilder.createGroup(context) + .withName("Group 1") + .build(); + + Group secondGroup = GroupBuilder.createGroup(context) + .withName("Group 2") + .build(); + + Group thirdGroup = GroupBuilder.createGroup(context) + .withName("Group 3") + .build(); + + EPerson firstUser = EPersonBuilder.createEPerson(context) + 
.withEmail("user@mail.it") + .withGroupMembership(firstGroup) + .build(); + + EPerson secondUser = EPersonBuilder.createEPerson(context) + .withEmail("user2@mail.it") + .withGroupMembership(secondGroup) + .build(); + + EPerson thirdUser = EPersonBuilder.createEPerson(context) + .withEmail("user3@mail.it") + .withGroupMembership(thirdGroup) + .build(); + + EPerson fourthUser = EPersonBuilder.createEPerson(context) + .withEmail("user4@mail.it") + .withGroupMembership(thirdGroup) + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test item") + .withDspaceObjectOwner("Owner", owner.getID().toString()) + .build(); + + Item itemNotAccessible = ItemBuilder.createItem(context, collection) + .withTitle("Test item not accessible") + .withDspaceObjectOwner("Owner", owner.getID().toString()) + .build(); + + context.restoreAuthSystemState(); + + AccessItemMode accessMode = buildAccessItemMode(CrisSecurity.GROUP); + when(accessMode.getGroups()).thenReturn(List.of("Group 1", thirdGroup.getID().toString())); + // filter valid only on first item + when(accessMode.getAdditionalFilter()).thenReturn(new Filter() { + @Override + public Boolean getResult(Context context, Item item) throws LogicalStatementException { + return item.getName().equals("Test item"); + } + + @Override + public String getName() { + return null; + } + + @Override + public void setBeanName(String s) {} + }); + + assertThat(crisSecurityService.hasAccess(context, item, eperson, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, item, admin, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, item, owner, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, item, collectionAdmin, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, item, communityAdmin, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, item, submitter, accessMode), is(false)); + 
assertThat(crisSecurityService.hasAccess(context, item, anotherSubmitter, accessMode), is(false)); + + assertThat(crisSecurityService.hasAccess(context, item, firstUser, accessMode), is(true)); + assertThat(crisSecurityService.hasAccess(context, item, secondUser, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, item, thirdUser, accessMode), is(true)); + assertThat(crisSecurityService.hasAccess(context, item, fourthUser, accessMode), is(true)); + + assertThat(crisSecurityService.hasAccess(context, itemNotAccessible, firstUser, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, itemNotAccessible, secondUser, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, itemNotAccessible, thirdUser, accessMode), is(false)); + assertThat(crisSecurityService.hasAccess(context, itemNotAccessible, fourthUser, accessMode), is(false)); + } + private AccessItemMode buildAccessItemMode(CrisSecurity... securities) { AccessItemMode mode = mock(AccessItemMode.class); when(mode.getSecurities()).thenReturn(List.of(securities)); diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java b/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java new file mode 100644 index 000000000000..d766b9565282 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java @@ -0,0 +1,106 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.service; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; + +import 
org.apache.logging.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.enhancer.service.ItemEnhancerService; +import org.dspace.content.enhancer.service.impl.ItemEnhancerServiceImpl; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.utils.DSpace; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ItemEnhancerServiceIT extends AbstractIntegrationTestWithDatabase { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemEnhancerServiceIT.class); + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private ItemService spyItemService = spy(itemService); + private ItemEnhancerServiceImpl itemEnhancerService = (ItemEnhancerServiceImpl) new DSpace() + .getSingletonService(ItemEnhancerService.class); + + Community community; + Collection collPub; + Collection collPerson; + Item person; + Item publication; + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. 
+ */ + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .build(); + collPerson = CollectionBuilder.createCollection(context, community) + .withEntityType("Person") + .build(); + collPub = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); + person = ItemBuilder.createItem(context, collPerson) + .withTitle("Famous Researcher") + .withAffiliation("Some department", null) + .build(); + + publication = ItemBuilder.createItem(context, collPub) + .withTitle("Item to enhance") + .withAuthor(person.getName(), person.getID().toString()) + .build(); + assertMetadataValue(itemService.getMetadataByMetadataString(publication, "cris.virtual.department").get(0), + "cris", "virtual", "department", "Some department", null, 0); + context.restoreAuthSystemState(); + itemEnhancerService.setItemService(spyItemService); + } + + @After + public void after() { + itemEnhancerService.setItemService(itemService); + } + + @Test + public void noUpdateRequiredTest() throws Exception { + context.turnOffAuthorisationSystem(); + itemEnhancerService.enhance(context, publication, false); + verify(spyItemService, never()).update(any(), any()); + itemEnhancerService.enhance(context, publication, true); + verify(spyItemService, never()).update(any(), any()); + context.restoreAuthSystemState(); + } + + + private void assertMetadataValue(MetadataValue metadataValue, String schema, String element, String qualifier, + String value, String authority, int place) { + assertThat(metadataValue.getValue(), equalTo(value)); + assertThat(metadataValue.getMetadataField().getMetadataSchema().getName(), equalTo(schema)); + assertThat(metadataValue.getMetadataField().getElement(), equalTo(element)); + assertThat(metadataValue.getMetadataField().getQualifier(), equalTo(qualifier)); + assertThat(metadataValue.getAuthority(), equalTo(authority)); 
+ assertThat(metadataValue.getPlace(), equalTo(place)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java similarity index 84% rename from dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java rename to dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java index 6ea5bc5b784b..7d0ddcb5f9e9 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java @@ -30,6 +30,8 @@ import org.dspace.app.requestitem.RequestItem; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -41,6 +43,7 @@ import org.dspace.builder.RequestItemBuilder; import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.EntityType; @@ -52,6 +55,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Constants; import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; import org.dspace.orcid.client.OrcidClient; import org.dspace.orcid.factory.OrcidServiceFactory; import org.dspace.orcid.factory.OrcidServiceFactoryImpl; @@ -63,8 +68,8 @@ import org.junit.Test; import org.orcid.jaxb.model.v3.release.search.expanded.ExpandedSearch; -public class ItemServiceTest extends AbstractIntegrationTestWithDatabase { - private static final Logger log = 
org.apache.logging.log4j.LogManager.getLogger(ItemServiceTest.class); +public class ItemServiceIT extends AbstractIntegrationTestWithDatabase { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceIT.class); protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance() @@ -77,6 +82,8 @@ public class ItemServiceTest extends AbstractIntegrationTestWithDatabase { protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); protected VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService(); + protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); protected OrcidClient orcidClient = OrcidServiceFactory.getInstance().getOrcidClient(); @@ -773,6 +780,154 @@ public void testRemoveItemThatHasRequests() throws Exception { assertNull(itemService.find(context, item.getID())); } + + @Test + public void testMoveItemToCollectionWithMoreRestrictiveItemReadPolicy() throws Exception { + /* Verify that, if we move an item from a collection with a permissive default item READ policy + * to a collection with a restrictive default item READ policy, + * that the item and its bundles do not retain the original permissive item READ policy. + * However, its bitstreams do. 
+ */ + + context.turnOffAuthorisationSystem(); + + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + Group admin = groupService.findByName(context, Group.ADMIN); + + // Set up the two different collections: one permissive and one restrictive in its default READ policy. + Collection permissive = CollectionBuilder + .createCollection(context, community) + .build(); + Collection restrictive = CollectionBuilder + .createCollection(context, community) + .build(); + authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_ITEM_READ); + authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_ITEM_READ, admin); + + // Add an item to the permissive collection. + Item item = ItemBuilder + .createItem(context, permissive) + .build(); + + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream()) + .build(); + + Bundle bundle = item.getBundles("ORIGINAL").get(0); + + // Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group. + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + // Move the item to the restrictive collection, making sure to inherit default policies. + itemService.move(context, item, permissive, restrictive, true); + + // Verify that the item's read policy now only allows administrators. 
+ assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + context.restoreAuthSystemState(); + } + + @Test + public void testMoveItemToCollectionWithMoreRestrictiveBitstreamReadPolicy() throws Exception { + /* Verify that, if we move an item from a collection with a permissive default bitstream READ policy + * to a collection with a restrictive default bitstream READ policy, + * that the item's bitstreams do not retain the original permissive READ policy. + * However, the item itself and its bundles do retain the original policy. + */ + + context.turnOffAuthorisationSystem(); + + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + Group admin = groupService.findByName(context, Group.ADMIN); + + // Set up the two different collections: one permissive and one restrictive in its default READ policy. + Collection permissive = CollectionBuilder + .createCollection(context, community) + .build(); + Collection restrictive = CollectionBuilder + .createCollection(context, community) + .build(); + authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_BITSTREAM_READ, admin); + + // Add an item to the permissive collection. 
+ Item item = ItemBuilder + .createItem(context, permissive) + .build(); + + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream()) + .build(); + + Bundle bundle = item.getBundles("ORIGINAL").get(0); + + // Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group. + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + // Move the item to the restrictive collection, making sure to inherit default policies. + itemService.move(context, item, permissive, restrictive, true); + + // Verify that the bitstream's read policy now only allows administrators, while the item and bundle retain anonymous READ. 
+ assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + context.restoreAuthSystemState(); + + } + private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value, String authority, int place, MetadataValue metadataValue) { assertThat(metadataValue.getValue(), equalTo(value)); diff --git a/dspace-api/src/test/java/org/dspace/core/ContextIT.java b/dspace-api/src/test/java/org/dspace/core/ContextIT.java new file mode 100644 index 000000000000..6cf8336171f2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/core/ContextIT.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import static org.junit.Assert.assertEquals; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.builder.CommunityBuilder; +import org.junit.Test; + +public class ContextIT extends AbstractIntegrationTestWithDatabase { + + AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + @Test + public void testGetPoliciesNewCommunityAfterReadOnlyModeChange() throws Exception { + 
+ context.turnOffAuthorisationSystem(); + + // First disable the index consumer. The indexing process calls the authorizeService + // function used in this test and may affect the test + context.setDispatcher("noindex"); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + context.restoreAuthSystemState(); + + context.setMode(Context.Mode.READ_ONLY); + + List policies = authorizeService.getPoliciesActionFilter(context, parentCommunity, + Constants.READ); + + assertEquals("Should return the default anonymous group read policy", 1, policies.size()); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/core/UtilsTest.java b/dspace-api/src/test/java/org/dspace/core/UtilsTest.java index 920fa69d6d31..291561ac2536 100644 --- a/dspace-api/src/test/java/org/dspace/core/UtilsTest.java +++ b/dspace-api/src/test/java/org/dspace/core/UtilsTest.java @@ -75,6 +75,12 @@ public void testGetHostName() { assertEquals("Test keep other prefixes", "demo.dspace.org", Utils.getHostName("https://demo.dspace.org")); + assertEquals("Test with parameter", "demo.dspace.org", + Utils.getHostName("https://demo.dspace.org/search?query=test")); + + assertEquals("Test with parameter with space", "demo.dspace.org", + Utils.getHostName("https://demo.dspace.org/search?query=test turbine")); + // This uses a bunch of reserved URI characters assertNull("Test invalid URI returns null", Utils.getHostName("&+,?/@=")); } diff --git a/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java index 88610ea95943..480604e9393e 100644 --- a/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java +++ b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java @@ -9,8 +9,6 @@ import static org.junit.Assert.assertEquals; -import java.io.IOException; - import 
org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -40,7 +38,7 @@ public class CreateMissingIdentifiersIT @Test public void testPerform() - throws IOException { + throws Exception { // Must remove any cached named plugins before creating a new one CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses(); ConfigurationService configurationService = kernelImpl.getConfigurationService(); diff --git a/dspace-api/src/test/java/org/dspace/curate/CurationIT.java b/dspace-api/src/test/java/org/dspace/curate/CurationIT.java index 6232793c7408..31bfe2550a4a 100644 --- a/dspace-api/src/test/java/org/dspace/curate/CurationIT.java +++ b/dspace-api/src/test/java/org/dspace/curate/CurationIT.java @@ -43,8 +43,9 @@ public void curationWithoutEPersonParameterTest() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } @@ -69,8 +70,9 @@ public void curationWithEPersonParameterTest() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } } diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 0d1cc13106a8..55be531418ae 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -7,14 +7,18 @@ */ package org.dspace.discovery; +import 
static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; +import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.dspace.AbstractIntegrationTestWithDatabase; @@ -24,6 +28,7 @@ import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.WorkflowItemBuilder; @@ -39,6 +44,8 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableItem; @@ -731,6 +738,64 @@ public void iteratorSearchServiceTest() throws SearchServiceException { } } + /** + * Test designed to check if default sort option for Discovery is working, using workspace + * DiscoveryConfiguration
      + * Note: this test will be skipped if the workspace configuration does not have a default sort option set of + * metadataType dc_date_accessioned or lastModified + * @throws SearchServiceException + */ + @Test + public void searchWithDefaultSortServiceTest() throws SearchServiceException { + DiscoveryConfiguration workspaceConf = + SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null); + // Skip if no default sort option set for workspaceConf + if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { + return; + } + + DiscoverySortFieldConfiguration defaultSortField = + workspaceConf.getSearchSortConfiguration().getDefaultSortField(); + + // Populate the testing objects: create items in eperson's workspace and perform search in it + int numberItems = 10; + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItems; i++) { + ItemBuilder.createItem(context, collection) + .withTitle("item " + i) + .build(); + } + context.restoreAuthSystemState(); + + // Build query with default parameters (except for workspaceConf) + DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() + .buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null, + null,null); + + DiscoverResult result = searchService.search(context, discoverQuery); + + /* + // code example for testing against sort by dc_date_accessioned + LinkedList dc_date_accessioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> m.getValue()).findFirst().orElse("") 
+ ) + .collect(Collectors.toCollection(LinkedList::new)); + }*/ + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); + } + } + private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { assertSearchQuery(resourceType, size, size, 0, -1); } diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java index b98db573566d..3780afcf6393 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java @@ -8,17 +8,23 @@ package org.dspace.eperson; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.List; +import java.util.Set; import javax.mail.MessagingException; import org.apache.commons.codec.DecoderException; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; @@ -274,63 +280,184 @@ public void testFindByNetid() */ /** - * Test of search method, of class EPerson. 
+ * Test of search() and searchResultCount() methods of EPersonService + * NOTE: Pagination is not verified here because it is tested in EPersonRestRepositoryIT */ -/* @Test - public void testSearch_Context_String() - throws Exception - { - System.out.println("search"); - Context context = null; - String query = ""; - EPerson[] expResult = null; - EPerson[] result = EPerson.search(context, query); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); + public void testSearchAndCountByNameEmail() throws SQLException, AuthorizeException, IOException { + List allEPeopleAdded = new ArrayList<>(); + Group testGroup = createGroup("TestingGroup"); + try { + // Create 4 EPersons. Add a few to a test group to verify group membership doesn't matter + EPerson eperson1 = createEPersonAndAddToGroup("eperson1@example.com", "Jane", "Doe", testGroup); + EPerson eperson2 = createEPerson("eperson2@example.com", "John", "Doe"); + EPerson eperson3 = createEPersonAndAddToGroup("eperson3@example.com", "John", "Smith", testGroup); + EPerson eperson4 = createEPerson("eperson4@example.com", "Doe", "Smith"); + allEPeopleAdded.addAll(Arrays.asList(eperson1, eperson2, eperson3, eperson4)); + + List allJohns = Arrays.asList(eperson2, eperson3); + List searchJohnResults = ePersonService.search(context, "John", -1, -1); + assertTrue(searchJohnResults.containsAll(allJohns)); + assertEquals(searchJohnResults.size(), ePersonService.searchResultCount(context, "John")); + + List allDoes = Arrays.asList(eperson1, eperson2, eperson4); + List searchDoeResults = ePersonService.search(context, "Doe", -1, -1); + assertTrue(searchDoeResults.containsAll(allDoes)); + assertEquals(searchDoeResults.size(), ePersonService.searchResultCount(context, "Doe")); + + List allSmiths = Arrays.asList(eperson3, eperson4); + List searchSmithResults = ePersonService.search(context, "Smith", -1, -1); + 
assertTrue(searchSmithResults.containsAll(allSmiths)); + assertEquals(searchSmithResults.size(), ePersonService.searchResultCount(context, "Smith")); + + // Assert search on example.com returns everyone + List searchEmailResults = ePersonService.search(context, "example.com", -1, -1); + assertTrue(searchEmailResults.containsAll(allEPeopleAdded)); + assertEquals(searchEmailResults.size(), ePersonService.searchResultCount(context, "example.com")); + + // Assert exact email search returns just one + List exactEmailResults = ePersonService.search(context, "eperson1@example.com", -1, -1); + assertTrue(exactEmailResults.contains(eperson1)); + assertEquals(exactEmailResults.size(), ePersonService.searchResultCount(context, "eperson1@example.com")); + + // Assert UUID search returns exact match + List uuidResults = ePersonService.search(context, eperson4.getID().toString(), -1, -1); + assertTrue(uuidResults.contains(eperson4)); + assertEquals(1, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchResultCount(context, eperson4.getID().toString())); + } finally { + // Remove all Groups & EPersons we added for this test + context.turnOffAuthorisationSystem(); + groupService.delete(context, testGroup); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } } -*/ /** - * Test of search method, of class EPerson. + * Test of searchNonMembers() and searchNonMembersCount() methods of EPersonService + * NOTE: Pagination is not verified here because it is tested in EPersonRestRepositoryIT */ -/* @Test - public void testSearch_4args() - throws Exception - { - System.out.println("search"); - Context context = null; - String query = ""; - int offset = 0; - int limit = 0; - EPerson[] expResult = null; - EPerson[] result = EPerson.search(context, query, offset, limit); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. 
- fail("The test case is a prototype."); - } -*/ + public void testSearchAndCountByNameEmailNonMembers() throws SQLException, AuthorizeException, IOException { + List allEPeopleAdded = new ArrayList<>(); + Group testGroup1 = createGroup("TestingGroup1"); + Group testGroup2 = createGroup("TestingGroup2"); + Group testGroup3 = createGroup("TestingGroup3"); + try { + // Create two EPersons in Group 1 + EPerson eperson1 = createEPersonAndAddToGroup("eperson1@example.com", "Jane", "Doe", testGroup1); + EPerson eperson2 = createEPersonAndAddToGroup("eperson2@example.com", "John", "Smith", testGroup1); + + // Create one more EPerson, and add it and a previous EPerson to Group 2 + EPerson eperson3 = createEPersonAndAddToGroup("eperson3@example.com", "John", "Doe", testGroup2); + context.turnOffAuthorisationSystem(); + groupService.addMember(context, testGroup2, eperson2); + groupService.update(context, testGroup2); + ePersonService.update(context, eperson2); + context.restoreAuthSystemState(); - /** - * Test of searchResultCount method, of class EPerson. - */ -/* - @Test - public void testSearchResultCount() - throws Exception - { - System.out.println("searchResultCount"); - Context context = null; - String query = ""; - int expResult = 0; - int result = EPerson.searchResultCount(context, query); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. 
- fail("The test case is a prototype."); + // Create 2 more EPersons with no group memberships + EPerson eperson4 = createEPerson("eperson4@example.com", "John", "Anthony"); + EPerson eperson5 = createEPerson("eperson5@example.org", "Smith", "Doe"); + allEPeopleAdded.addAll(Arrays.asList(eperson1, eperson2, eperson3, eperson4, eperson5)); + + // FIRST, test search by last name + // Verify all Does match a nonMember search of Group3 (which is an empty group) + List allDoes = Arrays.asList(eperson1, eperson3, eperson5); + List searchDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup3, -1, -1); + assertTrue(searchDoeResults.containsAll(allDoes)); + assertEquals(searchDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", testGroup3)); + + // Verify searching "Doe" with Group 2 *excludes* the one which is already a member + List allNonMemberDoes = Arrays.asList(eperson1, eperson5); + List searchNonMemberDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup2, + -1, -1); + assertTrue(searchNonMemberDoeResults.containsAll(allNonMemberDoes)); + assertFalse(searchNonMemberDoeResults.contains(eperson3)); + assertEquals(searchNonMemberDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", + testGroup2)); + + // Verify searching "Doe" with Group 1 *excludes* the one which is already a member + allNonMemberDoes = Arrays.asList(eperson3, eperson5); + searchNonMemberDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup1, -1, -1); + assertTrue(searchNonMemberDoeResults.containsAll(allNonMemberDoes)); + assertFalse(searchNonMemberDoeResults.contains(eperson1)); + assertEquals(searchNonMemberDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", + testGroup1)); + + // SECOND, test search by first name + // Verify all Johns match a nonMember search of Group3 (which is an empty group) + List allJohns = Arrays.asList(eperson2, eperson3, eperson4); + List searchJohnResults = 
ePersonService.searchNonMembers(context, "John", + testGroup3, -1, -1); + assertTrue(searchJohnResults.containsAll(allJohns)); + assertEquals(searchJohnResults.size(), ePersonService.searchNonMembersCount(context, "John", + testGroup3)); + + // Verify searching "John" with Group 2 *excludes* the two who are already a member + List allNonMemberJohns = Arrays.asList(eperson4); + List searchNonMemberJohnResults = ePersonService.searchNonMembers(context, "John", + testGroup2, -1, -1); + assertTrue(searchNonMemberJohnResults.containsAll(allNonMemberJohns)); + assertFalse(searchNonMemberJohnResults.contains(eperson2)); + assertFalse(searchNonMemberJohnResults.contains(eperson3)); + assertEquals(searchNonMemberJohnResults.size(), ePersonService.searchNonMembersCount(context, "John", + testGroup2)); + + // FINALLY, test search by email + // Assert search on example.com excluding Group 1 returns just those not in that group + List exampleNonMembers = Arrays.asList(eperson3, eperson4); + List searchEmailResults = ePersonService.searchNonMembers(context, "example.com", + testGroup1, -1, -1); + assertTrue(searchEmailResults.containsAll(exampleNonMembers)); + assertFalse(searchEmailResults.contains(eperson1)); + assertFalse(searchEmailResults.contains(eperson2)); + assertEquals(searchEmailResults.size(), ePersonService.searchNonMembersCount(context, "example.com", + testGroup1)); + + // Assert exact email search returns just one (if not in group) + List exactEmailResults = ePersonService.searchNonMembers(context, "eperson1@example.com", + testGroup2, -1, -1); + assertTrue(exactEmailResults.contains(eperson1)); + assertEquals(exactEmailResults.size(), ePersonService.searchNonMembersCount(context, "eperson1@example.com", + testGroup2)); + // But, change the group to one they are a member of, and they won't be included + exactEmailResults = ePersonService.searchNonMembers(context, "eperson1@example.com", + testGroup1, -1, -1); + assertFalse(exactEmailResults.contains(eperson1)); + 
assertEquals(exactEmailResults.size(), ePersonService.searchNonMembersCount(context, "eperson1@example.com", + testGroup1)); + + // Assert UUID search returns exact match (if not in group) + List uuidResults = ePersonService.searchNonMembers(context, eperson3.getID().toString(), + testGroup1, -1, -1); + assertTrue(uuidResults.contains(eperson3)); + assertEquals(1, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchNonMembersCount(context, eperson3.getID().toString(), + testGroup1)); + // But, change the group to one they are a member of, and you'll get no results + uuidResults = ePersonService.searchNonMembers(context, eperson3.getID().toString(), + testGroup2, -1, -1); + assertFalse(uuidResults.contains(eperson3)); + assertEquals(0, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchNonMembersCount(context, eperson3.getID().toString(), + testGroup2)); + + } finally { + // Remove all Groups & EPersons we added for this test + context.turnOffAuthorisationSystem(); + groupService.delete(context, testGroup1); + groupService.delete(context, testGroup2); + groupService.delete(context, testGroup3); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } } -*/ /** * Test of findAll method, of class EPerson. 
@@ -1029,6 +1156,57 @@ public void testCascadingDeleteSubmitterPreservesWorkflowItems() wfi.getSubmitter()); } + @Test + public void findAndCountByGroups() throws SQLException, AuthorizeException, IOException { + // Create a group with 3 EPerson members + Group group = createGroup("parentGroup"); + EPerson eperson1 = createEPersonAndAddToGroup("test1@example.com", group); + EPerson eperson2 = createEPersonAndAddToGroup("test2@example.com", group); + EPerson eperson3 = createEPersonAndAddToGroup("test3@example.com", group); + groupService.update(context, group); + + Group group2 = null; + EPerson eperson4 = null; + + try { + // Assert that findByGroup is the same list of EPersons as getMembers() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be + // compared directly to a List. See https://stackoverflow.com/a/57399383/3750035 + assertTrue( + CollectionUtils.isEqualCollection(group.getMembers(), + ePersonService.findByGroups(context, Set.of(group), -1, -1))); + // Assert countByGroups is the same as the size of members + assertEquals(group.getMembers().size(), ePersonService.countByGroups(context, Set.of(group))); + + // Add another group with duplicate EPerson + group2 = createGroup("anotherGroup"); + groupService.addMember(context, group2, eperson1); + groupService.update(context, group2); + + // Verify countByGroups is still 3 (existing person should not be counted twice) + assertEquals(3, ePersonService.countByGroups(context, Set.of(group, group2))); + + // Add a new EPerson to new group, verify count goes up by one + eperson4 = createEPersonAndAddToGroup("test4@example.com", group2); + assertEquals(4, ePersonService.countByGroups(context, Set.of(group, group2))); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, group); + if (group2 != null) { + 
groupService.delete(context, group2); + } + ePersonService.delete(context, eperson1); + ePersonService.delete(context, eperson2); + ePersonService.delete(context, eperson3); + if (eperson4 != null) { + ePersonService.delete(context, eperson4); + } + context.restoreAuthSystemState(); + } + } + /** * Creates an item, sets the specified submitter. * @@ -1075,4 +1253,54 @@ private WorkspaceItem prepareWorkspaceItem(EPerson submitter) context.restoreAuthSystemState(); return wsi; } + + protected Group createGroup(String name) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + Group group = groupService.create(context); + group.setName(name); + groupService.update(context, group); + context.restoreAuthSystemState(); + return group; + } + + protected EPerson createEPersonAndAddToGroup(String email, Group group) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = createEPerson(email); + groupService.addMember(context, group, ePerson); + groupService.update(context, group); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + + protected EPerson createEPersonAndAddToGroup(String email, String firstname, String lastname, Group group) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = createEPerson(email, firstname, lastname); + groupService.addMember(context, group, ePerson); + groupService.update(context, group); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + + protected EPerson createEPerson(String email) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = ePersonService.create(context); + ePerson.setEmail(email); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + protected EPerson createEPerson(String email, String firstname, String 
lastname) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = ePersonService.create(context); + ePerson.setEmail(email); + ePerson.setFirstName(context, firstname); + ePerson.setLastName(context, lastname); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } } diff --git a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java index ee9c883f1be6..fddcabe4b038 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java @@ -10,6 +10,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -21,6 +22,7 @@ import java.util.Collections; import java.util.List; +import org.apache.commons.collections4.CollectionUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; @@ -604,6 +606,30 @@ public void allMembers() throws SQLException, AuthorizeException, EPersonDeletio } } + @Test + public void countAllMembers() throws SQLException, AuthorizeException, EPersonDeletionException, IOException { + List allEPeopleAdded = new ArrayList<>(); + try { + context.turnOffAuthorisationSystem(); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups1@dspace.org", topGroup)); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups2@dspace.org", level1Group)); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups3@dspace.org", level2Group)); + context.restoreAuthSystemState(); + + assertEquals(3, groupService.countAllMembers(context, topGroup)); + assertEquals(2, 
groupService.countAllMembers(context, level1Group)); + assertEquals(1, groupService.countAllMembers(context, level2Group)); + } finally { + // Remove all the people added (in order to not impact other tests) + context.turnOffAuthorisationSystem(); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } + } + + @Test public void isEmpty() throws SQLException, AuthorizeException, EPersonDeletionException, IOException { assertTrue(groupService.isEmpty(topGroup)); @@ -620,6 +646,143 @@ public void isEmpty() throws SQLException, AuthorizeException, EPersonDeletionEx assertTrue(groupService.isEmpty(level2Group)); } + @Test + public void findAndCountByParent() throws SQLException, AuthorizeException, IOException { + + // Create a parent group with 3 child groups + Group parentGroup = createGroup("parentGroup"); + Group childGroup = createGroup("childGroup"); + Group child2Group = createGroup("child2Group"); + Group child3Group = createGroup("child3Group"); + groupService.addMember(context, parentGroup, childGroup); + groupService.addMember(context, parentGroup, child2Group); + groupService.addMember(context, parentGroup, child3Group); + groupService.update(context, parentGroup); + + try { + // Assert that findByParent is the same list of groups as getMemberGroups() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be + // compared directly to a List. 
See https://stackoverflow.com/a/57399383/3750035 + assertTrue( + CollectionUtils.isEqualCollection(parentGroup.getMemberGroups(), + groupService.findByParent(context, parentGroup, -1, -1))); + // Assert countBy parent is the same as the size of group members + assertEquals(parentGroup.getMemberGroups().size(), groupService.countByParent(context, parentGroup)); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, parentGroup); + groupService.delete(context, childGroup); + groupService.delete(context, child2Group); + groupService.delete(context, child3Group); + context.restoreAuthSystemState(); + } + } + + @Test + // Tests searchNonMembers() and searchNonMembersCount() + // NOTE: This does not test pagination as that is tested in GroupRestRepositoryIT in server-webapp + public void searchAndCountNonMembers() throws SQLException, AuthorizeException, IOException { + // Create a parent group with 2 child groups + Group parentGroup = createGroup("Some Parent Group"); + Group someStaffGroup = createGroup("Some Other Staff"); + Group someStudentsGroup = createGroup("Some Students"); + groupService.addMember(context, parentGroup, someStaffGroup); + groupService.addMember(context, parentGroup, someStudentsGroup); + groupService.update(context, parentGroup); + + // Create a separate parent which is not a member of the first & add two child groups to it + Group studentsNotInParentGroup = createGroup("Students not in Parent"); + Group otherStudentsNotInParentGroup = createGroup("Other Students"); + Group someOtherStudentsNotInParentGroup = createGroup("Some Other Students"); + groupService.addMember(context, studentsNotInParentGroup, otherStudentsNotInParentGroup); + groupService.addMember(context, studentsNotInParentGroup, someOtherStudentsNotInParentGroup); + groupService.update(context, studentsNotInParentGroup); + + try { + // Assert that all Groups *not* in parent group match an empty search + List notInParent = 
Arrays.asList(studentsNotInParentGroup, otherStudentsNotInParentGroup, + someOtherStudentsNotInParentGroup); + List nonMembersSearch = groupService.searchNonMembers(context, "", parentGroup, -1, -1); + // NOTE: Because others unit tests create groups, this search will return an undetermined number of results. + // Therefore, we just verify that our expected groups are included and others are NOT included. + assertTrue(nonMembersSearch.containsAll(notInParent)); + // Verify it does NOT contain members of parentGroup + assertFalse(nonMembersSearch.contains(someStaffGroup)); + assertFalse(nonMembersSearch.contains(someStudentsGroup)); + // Verify it also does NOT contain the parentGroup itself + assertFalse(nonMembersSearch.contains(parentGroup)); + // Verify the count for empty search matches the size of the search results + assertEquals(nonMembersSearch.size(), groupService.searchNonMembersCount(context, "", parentGroup)); + + // Assert a search on "Students" matches all those same groups (as they all include that word in their name) + nonMembersSearch = groupService.searchNonMembers(context, "Students", parentGroup, -1, -1); + assertTrue(nonMembersSearch.containsAll(notInParent)); + //Verify an existing member group with "Students" in its name does NOT get returned + assertFalse(nonMembersSearch.contains(someStudentsGroup)); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, "Students", parentGroup)); + + + // Assert a search on "other" matches just two groups + // (this also tests search is case insensitive) + nonMembersSearch = groupService.searchNonMembers(context, "other", parentGroup, -1, -1); + assertTrue(nonMembersSearch.containsAll( + Arrays.asList(otherStudentsNotInParentGroup, someOtherStudentsNotInParentGroup))); + // Verify an existing member group with "Other" in its name does NOT get returned + assertFalse(nonMembersSearch.contains(someStaffGroup)); + assertEquals(nonMembersSearch.size(), 
groupService.searchNonMembersCount(context, "other", parentGroup)); + + // Assert a search on "Parent" matches just one group + nonMembersSearch = groupService.searchNonMembers(context, "Parent", parentGroup, -1, -1); + assertTrue(nonMembersSearch.contains(studentsNotInParentGroup)); + // Verify Parent Group itself does NOT get returned + assertFalse(nonMembersSearch.contains(parentGroup)); + assertEquals(nonMembersSearch.size(), groupService.searchNonMembersCount(context, "Parent", parentGroup)); + + // Assert a UUID search matching a non-member group will return just that one group + nonMembersSearch = groupService.searchNonMembers(context, + someOtherStudentsNotInParentGroup.getID().toString(), + parentGroup, -1, -1); + assertEquals(1, nonMembersSearch.size()); + assertTrue(nonMembersSearch.contains(someOtherStudentsNotInParentGroup)); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, + someOtherStudentsNotInParentGroup.getID().toString(), + parentGroup)); + + // Assert a UUID search matching an EXISTING member will return NOTHING + // (as this group is excluded from the search) + nonMembersSearch = groupService.searchNonMembers(context, someStudentsGroup.getID().toString(), + parentGroup,-1, -1); + assertEquals(0, nonMembersSearch.size()); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, someStudentsGroup.getID().toString(), + parentGroup)); + + // Assert a UUID search matching Parent Group *itself* will return NOTHING + // (as this group is excluded from the search) + nonMembersSearch = groupService.searchNonMembers(context, parentGroup.getID().toString(), + parentGroup,-1, -1); + assertEquals(0, nonMembersSearch.size()); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, parentGroup.getID().toString(), + parentGroup)); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, parentGroup); + 
groupService.delete(context, someStaffGroup); + groupService.delete(context, someStudentsGroup); + groupService.delete(context, studentsNotInParentGroup); + groupService.delete(context, otherStudentsNotInParentGroup); + groupService.delete(context, someOtherStudentsNotInParentGroup); + context.restoreAuthSystemState(); + } + + } + protected Group createGroup(String name) throws SQLException, AuthorizeException { context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java index b60f4b5e6939..b305ccc18061 100644 --- a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java +++ b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java @@ -74,6 +74,7 @@ import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.util.UUIDUtils; +import org.dspace.validation.LicenseValidator; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -768,7 +769,7 @@ public void testRunHarvestWithPublicationAndThenPerson() throws Exception { Item publication = publications.get(0); List values = publication.getMetadata(); - assertThat(values, hasSize(17)); + assertThat(values, hasSize(21)); assertThat(values, hasItems(with("dc.title", "Test Publication"))); assertThat(values, hasItems(with("dc.type", "Controlled Vocabulary for Resource Type Genres::text"))); @@ -779,6 +780,8 @@ public void testRunHarvestWithPublicationAndThenPerson() throws Exception { assertThat(values, hasItems(with("oaire.citation.endPage", "180"))); assertThat(values, hasItems(with("dc.identifier.doi", "10.1007/978-3-642-35233-1_18"))); assertThat(values, hasItems(with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, 
hasItems(with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItems(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); assertThat(values, hasItems(with("cris.sourceId", "test-harvest::3"))); assertThat(values, hasItems(with("dspace.entity.type", "Publication"))); @@ -871,7 +874,7 @@ public void testRunHarvestWithPersonAndThenPublication() throws Exception { Item publication = findItemByOaiID("oai:test-harvest:Publications/3", collection); values = publication.getMetadata(); - assertThat(values, hasSize(19)); + assertThat(values, hasSize(21)); assertThat(values, hasItems(with("dc.title", "Test Publication"))); assertThat(values, hasItems(with("dc.type", "Controlled Vocabulary for Resource Type Genres::text"))); @@ -884,6 +887,8 @@ public void testRunHarvestWithPersonAndThenPublication() throws Exception { assertThat(values, hasItems(with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE))); assertThat(values, hasItems(with("cris.sourceId", "test-harvest::3"))); assertThat(values, hasItems(with("dspace.entity.type", "Publication"))); + assertThat(values, hasItems(with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItems(with("cris.virtualsource.department", UUIDUtils.toString(person.getID())))); assertThat(values, hasItems(with("cris.virtual.author-orcid", "0000-0002-9079-5932"))); assertThat(values, hasItems(with("cris.virtualsource.author-orcid", UUIDUtils.toString(person.getID())))); @@ -1302,7 +1307,8 @@ public void testRunHarvestWithEmailSentIfItemValidationFails() throws Exception assertThat(errorDetails.getMessages(), hasSize(2)); assertThat(errorDetails.getMessages(), hasItem("error.validation.filerequired - [/sections/upload]")); assertThat(errorDetails.getMessages(), - hasItem("error.validation.license.notgranted - [/sections/license]")); + hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]") + ); 
verifyNoMoreInteractions(mockClient, mockEmailSender); @@ -1425,7 +1431,7 @@ public void testRunHarvestWithEmailSentIfItemAndRecordValidationFails() throws E List messages = errorDetails.getMessages(); assertThat(messages, hasSize(3)); assertThat(messages, hasItem("error.validation.filerequired - [/sections/upload]")); - assertThat(messages, hasItem("error.validation.license.notgranted - [/sections/license]")); + assertThat(messages, hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]")); assertThat(messages, hasItem("error.validation.required - [/sections/publication/dc.date.issued]")); errorDetails = errors.get("oai:test-harvest:Publications/123456789/1002"); @@ -1433,7 +1439,7 @@ public void testRunHarvestWithEmailSentIfItemAndRecordValidationFails() throws E messages = errorDetails.getMessages(); assertThat(messages, hasSize(3)); assertThat(messages, hasItem("error.validation.filerequired - [/sections/upload]")); - assertThat(messages, hasItem("error.validation.license.notgranted - [/sections/license]")); + assertThat(messages, hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]")); assertThat(errorDetails.getMessages(), hasItem(containsString("Element 'oai_cerif:Publishers' " + "cannot have character [children]"))); @@ -1442,7 +1448,7 @@ public void testRunHarvestWithEmailSentIfItemAndRecordValidationFails() throws E messages = errorDetails.getMessages(); assertThat(messages, hasSize(2)); assertThat(messages, hasItem("error.validation.filerequired - [/sections/upload]")); - assertThat(messages, hasItem("error.validation.license.notgranted - [/sections/license]")); + assertThat(messages, hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]")); verifyNoMoreInteractions(mockClient, mockEmailSender); diff --git a/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java b/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java 
index db2be516ae49..4241ba26f223 100644 --- a/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java +++ b/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java @@ -187,9 +187,9 @@ private Item newItem() provider.delete(context, item); List metadata = itemService.getMetadata(item, - DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null); List remainder = new ArrayList<>(); @@ -200,13 +200,13 @@ private Item newItem() } itemService.clearMetadata(context, item, - DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null); - itemService.addMetadata(context, item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + itemService.addMetadata(context, item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null, remainder); @@ -252,9 +252,9 @@ public String createDOI(Item item, Integer status, boolean metadata, String doi) doiService.update(context, doiRow); if (metadata) { - itemService.addMetadata(context, item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + itemService.addMetadata(context, item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null, doiService.DOIToExternalForm(doi)); itemService.update(context, item); @@ -315,9 +315,9 @@ public void testStore_DOI_as_item_metadata() provider.saveDOIToObject(context, item, doi); context.restoreAuthSystemState(); - List metadata = itemService.getMetadata(item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + List metadata = itemService.getMetadata(item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + 
provider.DOI_QUALIFIER, null); boolean result = false; for (MetadataValue id : metadata) { @@ -337,9 +337,9 @@ public void testGet_DOI_out_of_item_metadata() + Long.toHexString(new Date().getTime()); context.turnOffAuthorisationSystem(); - itemService.addMetadata(context, item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + itemService.addMetadata(context, item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null, doiService.DOIToExternalForm(doi)); itemService.update(context, item); @@ -358,9 +358,9 @@ public void testRemove_DOI_from_item_metadata() + Long.toHexString(new Date().getTime()); context.turnOffAuthorisationSystem(); - itemService.addMetadata(context, item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + itemService.addMetadata(context, item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null, doiService.DOIToExternalForm(doi)); itemService.update(context, item); @@ -368,9 +368,9 @@ public void testRemove_DOI_from_item_metadata() provider.removeDOIFromObject(context, item, doi); context.restoreAuthSystemState(); - List metadata = itemService.getMetadata(item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + List metadata = itemService.getMetadata(item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null); boolean foundDOI = false; for (MetadataValue id : metadata) { @@ -456,9 +456,9 @@ public void testRemove_two_DOIs_from_item_metadata() context.restoreAuthSystemState(); // assure that the right one was removed - List metadata = itemService.getMetadata(item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + List metadata = itemService.getMetadata(item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null); boolean 
foundDOI1 = false; boolean foundDOI2 = false; @@ -480,9 +480,9 @@ public void testRemove_two_DOIs_from_item_metadata() context.restoreAuthSystemState(); // check it - metadata = itemService.getMetadata(item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + metadata = itemService.getMetadata(item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null); foundDOI1 = false; foundDOI2 = false; @@ -691,9 +691,9 @@ public void testDelete_specified_DOI() context.restoreAuthSystemState(); // assure that the right one was removed - List metadata = itemService.getMetadata(item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + List metadata = itemService.getMetadata(item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null); boolean foundDOI1 = false; boolean foundDOI2 = false; @@ -733,9 +733,9 @@ public void testDelete_all_DOIs() context.restoreAuthSystemState(); // assure that the right one was removed - List metadata = itemService.getMetadata(item, DOIIdentifierProvider.MD_SCHEMA, - DOIIdentifierProvider.DOI_ELEMENT, - DOIIdentifierProvider.DOI_QUALIFIER, + List metadata = itemService.getMetadata(item, provider.MD_SCHEMA, + provider.DOI_ELEMENT, + provider.DOI_QUALIFIER, null); boolean foundDOI1 = false; boolean foundDOI2 = false; diff --git a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java new file mode 100644 index 000000000000..7e549f6cae33 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java @@ -0,0 +1,115 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ 
+ */ +package org.dspace.identifier; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.VersionBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTestWithDatabase { + private ServiceManager serviceManager; + private IdentifierServiceImpl identifierService; + + private String firstHandle; + + private Collection collection; + private Item itemV1; + private Item itemV2; + private Item itemV3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + + serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + identifierService = serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0); + // Clean out providers to avoid any being used for creation of community and collection + identifierService.setProviders(new ArrayList<>()); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + } + + private void registerProvider(Class type) { + // Register our new provider + serviceManager.registerServiceClass(type.getName(), type); + IdentifierProvider identifierProvider = + (IdentifierProvider) serviceManager.getServiceByName(type.getName(), type); + + // Overwrite the 
identifier-service's providers with the new one to ensure only this provider is used + identifierService.setProviders(List.of(identifierProvider)); + } + + private void createVersions() throws SQLException, AuthorizeException { + itemV1 = ItemBuilder.createItem(context, collection) + .withTitle("First version") + .build(); + firstHandle = itemV1.getHandle(); + itemV2 = VersionBuilder.createVersion(context, itemV1, "Second version").build().getItem(); + itemV3 = VersionBuilder.createVersion(context, itemV1, "Third version").build().getItem(); + } + + @Test + public void testDefaultVersionedHandleProvider() throws Exception { + registerProvider(VersionedHandleIdentifierProvider.class); + createVersions(); + + // Confirm the original item only has its original handle + assertEquals(firstHandle, itemV1.getHandle()); + assertEquals(1, itemV1.getHandles().size()); + // Confirm the second item has the correct version handle + assertEquals(firstHandle + ".2", itemV2.getHandle()); + assertEquals(1, itemV2.getHandles().size()); + // Confirm the last item has the correct version handle + assertEquals(firstHandle + ".3", itemV3.getHandle()); + assertEquals(1, itemV3.getHandles().size()); + } + + @Test + public void testCanonicalVersionedHandleProvider() throws Exception { + registerProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class); + createVersions(); + + // Confirm the original item only has a version handle + assertEquals(firstHandle + ".1", itemV1.getHandle()); + assertEquals(1, itemV1.getHandles().size()); + // Confirm the second item has the correct version handle + assertEquals(firstHandle + ".2", itemV2.getHandle()); + assertEquals(1, itemV2.getHandles().size()); + // Confirm the last item has both the correct version handle and the original handle + assertEquals(firstHandle, itemV3.getHandle()); + assertEquals(2, itemV3.getHandles().size()); + containsHandle(itemV3, firstHandle + ".3"); + } + + private void containsHandle(Item item, String handle) { 
+ assertTrue(item.getHandles().stream().anyMatch(h -> handle.equals(h.getHandle()))); + } +} diff --git a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java new file mode 100644 index 000000000000..1bc6bf140832 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java @@ -0,0 +1,115 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.identifier; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.VersionBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class VersionedHandleIdentifierProviderTest extends AbstractIntegrationTestWithDatabase { + private ServiceManager serviceManager; + private IdentifierServiceImpl identifierService; + + private String firstHandle; + + private Collection collection; + private Item itemV1; + private Item itemV2; + private Item itemV3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + + serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + identifierService = 
serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0); + // Clean out providers to avoid any being used for creation of community and collection + identifierService.setProviders(new ArrayList<>()); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + } + + private void registerProvider(Class type) { + // Register our new provider + serviceManager.registerServiceClass(type.getName(), type); + IdentifierProvider identifierProvider = + (IdentifierProvider) serviceManager.getServiceByName(type.getName(), type); + + // Overwrite the identifier-service's providers with the new one to ensure only this provider is used + identifierService.setProviders(List.of(identifierProvider)); + } + + private void createVersions() throws SQLException, AuthorizeException { + itemV1 = ItemBuilder.createItem(context, collection) + .withTitle("First version") + .build(); + firstHandle = itemV1.getHandle(); + itemV2 = VersionBuilder.createVersion(context, itemV1, "Second version").build().getItem(); + itemV3 = VersionBuilder.createVersion(context, itemV1, "Third version").build().getItem(); + } + + @Test + public void testDefaultVersionedHandleProvider() throws Exception { + registerProvider(VersionedHandleIdentifierProvider.class); + createVersions(); + + // Confirm the original item only has its original handle + assertEquals(firstHandle, itemV1.getHandle()); + assertEquals(1, itemV1.getHandles().size()); + // Confirm the second item has the correct version handle + assertEquals(firstHandle + ".2", itemV2.getHandle()); + assertEquals(1, itemV2.getHandles().size()); + // Confirm the last item has the correct version handle + assertEquals(firstHandle + ".3", itemV3.getHandle()); + assertEquals(1, itemV3.getHandles().size()); + } + + @Test + public void testCanonicalVersionedHandleProvider() throws Exception 
{ + registerProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class); + createVersions(); + + // Confirm the original item only has a version handle + assertEquals(firstHandle + ".1", itemV1.getHandle()); + assertEquals(1, itemV1.getHandles().size()); + // Confirm the second item has the correct version handle + assertEquals(firstHandle + ".2", itemV2.getHandle()); + assertEquals(1, itemV2.getHandles().size()); + // Confirm the last item has both the correct version handle and the original handle + assertEquals(firstHandle, itemV3.getHandle()); + assertEquals(2, itemV3.getHandles().size()); + containsHandle(itemV3, firstHandle + ".3"); + } + + private void containsHandle(Item item, String handle) { + assertTrue(item.getHandles().stream().anyMatch(h -> handle.equals(h.getHandle()))); + } +} diff --git a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java index 038654af438d..7dba38c987b7 100644 --- a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java +++ b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java @@ -353,6 +353,40 @@ public void processItemWithExistingMetadata() throws Exception { } + + @Test + public void processItemWithJp2File() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jp2 image to verify image server call for dimensions + InputStream input = this.getClass().getResourceAsStream("cat.jp2"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jp2") + .withMimeType("image/jp2") + .build(); + + context.restoreAuthSystemState(); + + String id = iiifItem.getID().toString(); + + execCanvasScript(id); + + assertTrue(bitstream.getMetadata().stream() + 
.filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("64"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("64"))); + + } + @Test public void processParentCommunityWithMaximum() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java b/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java index e2c264386a64..86786fa9f256 100644 --- a/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java +++ b/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java @@ -274,7 +274,7 @@ public void testWithValidLayout() throws InstantiationException, IllegalAccessEx assertThat(tabService.findAll(context), hasSize(3)); - List personTabs = tabService.findByEntityType(context, "Person"); + List personTabs = tabService.findByEntityType(context, "Person", null); assertThat(personTabs, hasSize(2)); CrisLayoutTab firstPersonTab = personTabs.get(0); @@ -369,7 +369,7 @@ public void testWithValidLayout() throws InstantiationException, IllegalAccessEx assertThat(profileResearchoutputsBox.getGroupSecurityFields(), contains(matches(groupField -> groupField.getName().equals("Researchers")))); - List publicationTabs = tabService.findByEntityType(context, "Publication"); + List publicationTabs = tabService.findByEntityType(context, "Publication", null); assertThat(publicationTabs, hasSize(1)); CrisLayoutTab publicationTab = publicationTabs.get(0); diff --git a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java index fd61aeb0a762..ce539cda67aa 100644 --- 
a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java @@ -291,6 +291,31 @@ public void testIiifBoxHasNoContentWithMetadataUndefined() { assertFalse(crisLayoutBoxService.hasContent(context, box, item)); } + @Test + public void testSingleMetadataboxBitstreamWithoutField() throws SQLException { + + CrisLayoutBox singleBitstreamBox = new CrisLayoutBox(); + singleBitstreamBox.setShortname("File"); + singleBitstreamBox.setType(null); + + Item item = item(); + Bitstream bitstream = mock(Bitstream.class); + + CrisLayoutFieldBitstream fieldBitstream = new CrisLayoutFieldBitstream(); + fieldBitstream.setBundle("ORIGINAL"); + fieldBitstream.setMetadataValue(null); + fieldBitstream.setMetadataField(null); + fieldBitstream.setRendering("attachment"); + + singleBitstreamBox.addLayoutField(fieldBitstream); + + when(bitstreamService.findShowableByItem(context, item.getID(), "ORIGINAL", Map.of())) + .thenReturn(List.of(bitstream)); + + assertThat(crisLayoutBoxService.hasContent(context, singleBitstreamBox, item), is(true)); + + } + private CrisLayoutBox crisLayoutMetadataBox(String shortname, MetadataField... 
metadataFields) { return crisLayoutBox(shortname, CrisLayoutBoxTypes.METADATA.name(), metadataFields); } diff --git a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java index 7260f43849b5..3e439c98010c 100644 --- a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java @@ -32,6 +32,7 @@ import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; import org.dspace.layout.dao.CrisLayoutTabDAO; +import org.dspace.services.ConfigurationService; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; @@ -55,6 +56,8 @@ public class CrisLayoutTabServiceImplTest { private AuthorizeService authorizeService; @Mock private ItemService itemService; + @Mock + private ConfigurationService configurationService; @InjectMocks private CrisLayoutTabServiceImpl crisLayoutTabService; @@ -97,7 +100,7 @@ public void allTabsAreReturned() throws SQLException { when(itemService.getMetadata(item, "dspace.entity.type")) .thenReturn(entityType); - when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType)) + when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null)) .thenReturn(Arrays.asList(tabOne, tabTwo, tabThree, tabWithoutBoxes, tabWithOnlyForbiddenBoxes)); List tabs = crisLayoutTabService.findByItem(context, itemUuid); @@ -120,7 +123,7 @@ public void noTabsFoundForEntityType() throws SQLException { when(itemService.getMetadata(item, "dspace.entity.type")) .thenReturn(entityType); - when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType)).thenReturn(emptyList()); + when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null)).thenReturn(emptyList()); List tabs = crisLayoutTabService.findByItem(context, itemUuid); @@ -140,7 
+143,7 @@ public void nullTabsFoundForEntityType() throws SQLException { when(itemService.getMetadata(item, "dspace.entity.type")) .thenReturn(entityType); - when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType)).thenReturn(List.of()); + when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null)).thenReturn(List.of()); List tabs = crisLayoutTabService.findByItem(context, itemUuid); diff --git a/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java index 1cec9473ba22..a1ebec2197e4 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java @@ -26,6 +26,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; import java.sql.SQLException; import java.time.Instant; @@ -42,13 +43,19 @@ import org.dspace.content.Collection; import org.dspace.content.Item; import org.dspace.content.MetadataValue; +import org.dspace.content.WorkspaceItem; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.InstallItemService; import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; import org.dspace.orcid.consumer.OrcidQueueConsumer; import org.dspace.orcid.factory.OrcidServiceFactory; import org.dspace.orcid.service.OrcidQueueService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; +import org.dspace.versioning.Version; +import org.dspace.versioning.service.VersioningService; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -65,8 +72,15 @@ public class OrcidQueueConsumerIT extends AbstractIntegrationTestWithDatabase { private ItemService itemService = 
ContentServiceFactory.getInstance().getItemService(); + private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + + private InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private VersioningService versioningService = new DSpace().getServiceManager() + .getServicesByType(VersioningService.class).get(0); + private Collection profileCollection; @Before @@ -966,6 +980,113 @@ public void testOrcidQueueRecordCreationForPublicationWithNotFoundAuthority() th assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); } + @Test + public void testOrcidQueueWithItemVersioning() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .withAuthor("Test User", profile.getID().toString()) + .build(); + + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); + + Version newVersion = versioningService.createNewVersion(context, publication); + Item newPublication = newVersion.getItem(); + assertThat(newPublication.isArchived(), is(false)); + + context.commit(); + + orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + 
assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); + + WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, newVersion.getItem()); + installItemService.installItem(context, workspaceItem); + + context.commit(); + + context.restoreAuthSystemState(); + + orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, newPublication, "Publication", INSERT)); + + } + + @Test + public void testOrcidQueueUpdateWithItemVersioning() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .build(); + + OrcidHistory orcidHistory = OrcidHistoryBuilder.createOrcidHistory(context, profile, publication) + .withDescription("Test publication") + .withOperation(OrcidOperation.INSERT) + .withPutCode("12345") + .withStatus(201) + .build(); + + addMetadata(publication, "dc", "contributor", "author", "Test User", profile.getID().toString()); + + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", "12345", UPDATE)); + + Version newVersion = versioningService.createNewVersion(context, publication); + Item newPublication = newVersion.getItem(); + assertThat(newPublication.isArchived(), is(false)); + + context.commit(); + + orcidQueueRecords = orcidQueueService.findAll(context); + 
assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", "12345", UPDATE)); + + WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, newVersion.getItem()); + installItemService.installItem(context, workspaceItem); + + context.commit(); + + context.restoreAuthSystemState(); + + orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, newPublication, "Publication", "12345", UPDATE)); + + orcidHistory = context.reloadEntity(orcidHistory); + assertThat(orcidHistory.getEntity(), is(newPublication)); + + } + private void addMetadata(Item item, String schema, String element, String qualifier, String value, String authority) throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java index d9718507ba68..417ccfeeb1d6 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java @@ -209,6 +209,62 @@ public void testWithManyOrcidQueueRecords() throws Exception { } + @Test + public void testWithVeryLongTitleQueueRecords() throws Exception { + Item firstProfileItem = createOwnerItem("0000-1111-2222-3333", BATCH, eperson); + Item firstEntity = createPublication("Publication with a very very very very very very very very very " + + "very very very very very very very very very very very very very very very very very very very very " + + "very very very very very very very very very very very very very very very very very even " + + "extremely long title"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + when(orcidClientMock.update(any(), eq("0000-1111-2222-3333"), any(), 
eq("98765"))) + .thenReturn(updatedResponse("98765")); + + when(orcidClientMock.deleteByPutCode( + any(), + eq("0000-1111-2222-3333"), + eq("22222"), + eq("/work")) + ).thenReturn(deletedResponse()); + + createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, firstProfileItem, "Description", "Publication", "22222"); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + String firstProfileItemId = firstProfileItem.getID().toString(); + + assertThat(handler.getInfoMessages(), hasSize(5)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 2 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItemId, + "History record created with status 201. The operation was completed successfully", + "Deletion of Publication for profile with ID: " + firstProfileItemId + " by put code 22222", + "History record created with status 204. The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).deleteByPutCode( + any(), + eq("0000-1111-2222-3333"), + eq("22222"), + eq("/work")); + + verifyNoMoreInteractions(orcidClientMock); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, 204, DELETE)))); + } + @Test public void testWithOneValidationError() throws Exception { diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java index 4d2e85a5b99e..a28c968eb992 100644 --- 
a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java @@ -242,6 +242,181 @@ public void testFullEmploymentCreation() { } + @Test + public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreation() { + + context.turnOffAuthorisationSystem(); + + Item orgUnitWithRinId = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science with rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withOrgUnitRinggoldIdentifier("12345") + .build(); + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science with rin", orgUnitWithRinId.getID().toString()) + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonAffiliation("4Science", orgUnit.getID().toString()) + .withPersonAffiliationStartDate("2020-02") + .withPersonAffiliationEndDate(PLACEHOLDER_PARENT_METADATA_VALUE) + .withPersonAffiliationRole("Researcher") + .build(); + + context.restoreAuthSystemState(); + + List values = new ArrayList<>(); + values.add(getMetadata(item, "oairecerif.person.affiliation", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.startDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.endDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.role", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, AFFILIATION); + assertThat(firstOrcidObject, instanceOf(Employment.class)); + Employment qualification = (Employment) firstOrcidObject; + assertThat(qualification.getStartDate(), notNullValue()); + assertThat(qualification.getStartDate().getYear().getValue(), is("2020")); + assertThat(qualification.getStartDate().getMonth().getValue(), is("02")); + 
assertThat(qualification.getStartDate().getDay().getValue(), is("01")); + assertThat(qualification.getEndDate(), nullValue()); + assertThat(qualification.getRoleTitle(), is("Researcher")); + assertThat(qualification.getDepartmentName(), is("4Science")); + + Organization organization = qualification.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), notNullValue()); + assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); + assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("RINGGOLD")); + } + + @Test + public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithAncestor() { + + context.turnOffAuthorisationSystem(); + + Item orgUnitGranfather = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science with rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withOrgUnitRinggoldIdentifier("12345") + .build(); + + Item orgUnitFather = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science without rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science with rin", orgUnitGranfather.getID().toString()) + .build(); + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science without rin", orgUnitFather.getID().toString()) + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonAffiliation("4Science", orgUnit.getID().toString()) + .withPersonAffiliationStartDate("2020-02") + 
.withPersonAffiliationEndDate(PLACEHOLDER_PARENT_METADATA_VALUE) + .withPersonAffiliationRole("Researcher") + .build(); + + context.restoreAuthSystemState(); + + List values = new ArrayList<>(); + values.add(getMetadata(item, "oairecerif.person.affiliation", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.startDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.endDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.role", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, AFFILIATION); + assertThat(firstOrcidObject, instanceOf(Employment.class)); + Employment qualification = (Employment) firstOrcidObject; + assertThat(qualification.getStartDate(), notNullValue()); + assertThat(qualification.getStartDate().getYear().getValue(), is("2020")); + assertThat(qualification.getStartDate().getMonth().getValue(), is("02")); + assertThat(qualification.getStartDate().getDay().getValue(), is("01")); + assertThat(qualification.getEndDate(), nullValue()); + assertThat(qualification.getRoleTitle(), is("Researcher")); + assertThat(qualification.getDepartmentName(), is("4Science")); + + Organization organization = qualification.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), notNullValue()); + assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); + assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("RINGGOLD")); + } + + @Test + public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithNoId() { + + context.turnOffAuthorisationSystem(); + + Item orgUnitWithRinId = 
ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science with rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .build(); + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science with rin", orgUnitWithRinId.getID().toString()) + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonAffiliation("4Science", orgUnit.getID().toString()) + .withPersonAffiliationStartDate("2020-02") + .withPersonAffiliationEndDate(PLACEHOLDER_PARENT_METADATA_VALUE) + .withPersonAffiliationRole("Researcher") + .build(); + + context.restoreAuthSystemState(); + + List values = new ArrayList<>(); + values.add(getMetadata(item, "oairecerif.person.affiliation", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.startDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.endDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.role", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, AFFILIATION); + assertThat(firstOrcidObject, instanceOf(Employment.class)); + Employment qualification = (Employment) firstOrcidObject; + assertThat(qualification.getStartDate(), notNullValue()); + assertThat(qualification.getStartDate().getYear().getValue(), is("2020")); + assertThat(qualification.getStartDate().getMonth().getValue(), is("02")); + assertThat(qualification.getStartDate().getDay().getValue(), is("01")); + assertThat(qualification.getEndDate(), nullValue()); + assertThat(qualification.getRoleTitle(), is("Researcher")); + assertThat(qualification.getDepartmentName(), is("4Science")); + + Organization organization = qualification.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + 
assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), nullValue()); + } + @Test public void testQualificationCreation() { context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java index af50c5d3facb..045b10d701bd 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java @@ -134,11 +134,15 @@ public void testSignatureGenerationWithSingleMetadataValue() { public void testSignatureGenerationWithManyEqualsMetadataValues() { context.turnOffAuthorisationSystem(); + Item person = ItemBuilder.createItem(context, collection) + .withTitle("Jesse Pinkman") + .build(); + Item item = ItemBuilder.createItem(context, collection) .withTitle("Item title") .withDescription("Description") - .withAuthor("Jesse Pinkman") - .withAuthor("Jesse Pinkman") + .withAuthor("Jesse Pinkman", person.getID().toString()) + .withAuthor("Jesse Pinkman", person.getID().toString()) .build(); context.restoreAuthSystemState(); @@ -146,12 +150,12 @@ public void testSignatureGenerationWithManyEqualsMetadataValues() { MetadataValue firstAuthor = getMetadata(item, "dc.contributor.author", 0); String firstSignature = generator.generate(context, List.of(firstAuthor)); assertThat(firstSignature, notNullValue()); - assertThat(firstSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + assertThat(firstSignature, equalTo("dc.contributor.author::Jesse Pinkman::" + person.getID().toString())); MetadataValue secondAuthor = getMetadata(item, "dc.contributor.author", 1); String secondSignature = generator.generate(context, List.of(secondAuthor)); 
assertThat(secondSignature, notNullValue()); - assertThat(secondSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + assertThat(secondSignature, equalTo("dc.contributor.author::Jesse Pinkman::" + person.getID().toString())); List metadataValues = generator.findBySignature(context, item, firstSignature); assertThat(metadataValues, hasSize(1)); diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af762..632b4e2f83f4 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java 
b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java index f757a746ab89..7aae1cf2719c 100644 --- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java @@ -13,6 +13,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -27,6 +28,8 @@ import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.sql.SQLException; +import java.util.List; +import java.util.Map; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.AnonymousAWSCredentials; @@ -84,7 +87,7 @@ public void setup() throws Exception { amazonS3Client = createAmazonS3Client(); - s3BitStoreService = new S3BitStoreService(amazonS3Client, null); + s3BitStoreService = new S3BitStoreService(amazonS3Client); context.turnOffAuthorisationSystem(); @@ -122,7 +125,7 @@ public void testBitstreamPutAndGetWithAlreadyPresentBucket() throws IOException s3BitStoreService.put(bitstream, toInputStream(content)); - String expectedChecksum = generateChecksum(content); + String expectedChecksum = Utils.toHex(generateChecksum(content)); assertThat(bitstream.getSizeBytes(), is((long) content.length())); assertThat(bitstream.getChecksum(), is(expectedChecksum)); @@ -153,7 +156,7 @@ public void testBitstreamPutAndGetWithoutSpecifingBucket() throws IOException { s3BitStoreService.put(bitstream, toInputStream(content)); - String expectedChecksum = generateChecksum(content); + String expectedChecksum = Utils.toHex(generateChecksum(content)); assertThat(bitstream.getSizeBytes(), is((long) content.length())); assertThat(bitstream.getChecksum(), is(expectedChecksum)); 
@@ -214,6 +217,47 @@ public void testBitstreamDeletion() throws IOException { } + @Test + public void testAbout() throws IOException { + + s3BitStoreService.init(); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + Map about = s3BitStoreService.about(bitstream, List.of()); + assertThat(about.size(), is(0)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about.size(), is(1)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about.size(), is(2)); + + String expectedChecksum = Utils.toHex(generateChecksum(content)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about, hasEntry("checksum", expectedChecksum)); + assertThat(about.size(), is(3)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum", "checksum_algorithm")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about, hasEntry("checksum", expectedChecksum)); + assertThat(about, hasEntry("checksum_algorithm", CSA)); + assertThat(about.size(), is(4)); + + } + @Test public void handleRegisteredIdentifierPrefixInS3() { String trueBitStreamId = "012345"; @@ -338,11 +382,11 @@ public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRem assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator))); } - private String generateChecksum(String content) { + private byte[] 
generateChecksum(String content) { try { MessageDigest m = MessageDigest.getInstance("MD5"); m.update(content.getBytes()); - return Utils.toHex(m.digest()); + return m.digest(); } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } diff --git a/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java b/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java index fe80bf143756..c0c5a0c02194 100644 --- a/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java +++ b/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java @@ -27,7 +27,8 @@ public class PersonNameUtilTest { @Test public void testWithAllNames() { - Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, Luca", "Luke Giammo")); + Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, Luca", + "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. Giamminonni", "Giamminonni L", "L Giamminonni", "Luke Giammo", "Giammo Luke")); @@ -37,7 +38,7 @@ public void testWithAllNames() { public void testWithFirstNameComposedByTwoNames() { Set variants = getAllNameVariants("Luca Paolo", "Giamminonni", - List.of("Giamminonni, Luca", "Luke Giammo")); + List.of("Giamminonni, Luca", "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Paolo", "Luca Paolo Giamminonni", "Giamminonni Luca", "Luca Giamminonni", "Giamminonni Paolo", "Paolo Giamminonni", @@ -51,7 +52,7 @@ public void testWithFirstNameComposedByTwoNames() { public void testWithFirstNameComposedByThreeNames() { Set variants = getAllNameVariants("Luca Paolo Claudio", "Giamminonni", - List.of("Giamminonni, Luca", "Luke Giammo")); + List.of("Giamminonni, Luca", "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Paolo Claudio", "Luca Paolo Claudio Giamminonni", "Giamminonni Luca Claudio", "Luca Claudio Giamminonni", "Giamminonni Paolo 
Claudio", @@ -69,7 +70,8 @@ public void testWithFirstNameComposedByThreeNames() { @Test public void testWithoutFirstAndLastName() { - Set variants = getAllNameVariants(null, null, List.of("Giamminonni, Luca Fabio", "Luke Giammo")); + Set variants = getAllNameVariants(null, null, List.of("Giamminonni, Luca Fabio", "Luke Giammo"), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Fabio", "Fabio Luca Giamminonni", "Giamminonni Fabio Luca", "Luca Fabio Giamminonni", "Luca Giamminonni Fabio", @@ -80,12 +82,13 @@ public void testWithoutFirstAndLastName() { @Test public void testWithAlreadyTruncatedName() { - Set variants = getAllNameVariants("L.", "Giamminonni", List.of("Giamminonni, Luca")); + Set variants = getAllNameVariants("L.", "Giamminonni", List.of("Giamminonni, Luca"), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. Giamminonni", "Giamminonni L", "L Giamminonni")); - variants = getAllNameVariants("L. P.", "Giamminonni", List.of("Giamminonni, Luca")); + variants = getAllNameVariants("L. P.", "Giamminonni", List.of("Giamminonni, Luca"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "L. Giamminonni", "Giamminonni L.", "P. Giamminonni", "Giamminonni P.", "Giamminonni L. P.", "L. P. Giamminonni", @@ -97,7 +100,8 @@ public void testWithAlreadyTruncatedName() { @Test public void testWithAlreadyTruncatedNameOnFullName() { - Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, L.")); + Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, L."), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. 
Giamminonni", "Giamminonni L", "L Giamminonni")); diff --git a/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java new file mode 100644 index 000000000000..12055140a2f7 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.junit.Assert.assertEquals; + +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Date; + +import org.junit.Test; + +/** + * Test {@link TimeHelpers}. + * @author Mark H. Wood + */ +public class TimeHelpersTest { + /** + * Test of toMidnightUTC method, of class TimeHelpers. + */ + @Test + public void testToMidnightUTC() { + System.out.println("toMidnightUTC"); + Date from = Date.from(ZonedDateTime.of(1957, 01, 27, 04, 05, 06, 007, ZoneOffset.UTC).toInstant()); + Date expResult = Date.from(ZonedDateTime.of(1957, 01, 27, 00, 00, 00, 000, ZoneOffset.UTC).toInstant()); + Date result = TimeHelpers.toMidnightUTC(from); + assertEquals(expResult, result); + } +} diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf new file mode 100644 index 000000000000..5b3749cbff73 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf differ diff --git a/dspace-api/src/test/resources/org/dspace/authority/orcid/orcid-record.xml b/dspace-api/src/test/resources/org/dspace/authority/orcid/orcid-record.xml new file mode 100644 index 000000000000..7672e980c8bd --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/authority/orcid/orcid-record.xml @@ -0,0 +1,270 @@ + + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 
0000-0002-9029-1854 + sandbox.orcid.org + + + en + + + Direct + 2023-09-19T12:25:43.445Z + 2023-10-12T14:19:06.983Z + true + true + true + + + 2023-10-12T13:28:14.550Z + + 2023-09-19T12:25:43.736Z + 2023-09-19T12:25:43.736Z + Andrea + Bollini + + + 2023-10-12T13:28:14.550Z + + 2023-10-05T07:56:29.001Z + 2023-10-12T13:28:14.550Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + isco + + + 2023-10-12T13:28:14.541Z + 2023-10-12T13:28:14.541Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Bollini, Andrea + + + + 2023-10-12T13:27:57.187Z + + 2023-10-12T10:35:14.406Z + 2023-10-12T13:27:57.187Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Linkedin + https://it.linkedin.com/in/andreabollini + + + 2023-10-12T13:27:57.183Z + 2023-10-12T13:27:57.183Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + 4science + https://www.4science.it/ + + + + 2023-10-12T10:38:48.105Z + + 2023-10-12T10:33:21.077Z + 2023-10-12T10:38:48.105Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + test-orcid@mailinator.com + + + + + + 2023-09-04T09:04:52.121Z + + 2023-01-13T11:20:13.803Z + 2023-01-13T11:48:02.979Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 57432999200 + http://www.scopus.com/inward/authorDetails.url?authorID=57432999200&partnerID=MN8TOARS + self + + + 2023-01-19T14:25:14.512Z + 2023-01-19T14:25:14.512Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 35233141600 + http://www.scopus.com/inward/authorDetails.url?authorID=35233141600&partnerID=MN8TOARS + self + + 
+ + + 2023-10-12T14:19:06.992Z + + + + 2023-10-12T10:52:26.965Z + + 2023-10-12T10:52:26.965Z + + + 2023-10-12T10:52:26.965Z + 2023-10-12T10:52:26.965Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Dspace + + Milan + IT + + + + + + 2023-10-12T10:35:49.079Z + + + 2023-10-12T10:34:17.514Z + 2023-10-12T10:35:49.079Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + 4Science + + Milan + IT + + + https://ror.org/03vb2cr34 + ROR + + + + + + + + + + + + + + 2023-10-12T14:19:06.992Z + + 2023-10-12T14:19:06.992Z + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + eid + 55484808800 + 55484808800 + self + + + + 2023-10-12T14:09:25.415Z + 2023-10-12T14:19:06.992Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Publication Metadata in CERIF: Inspiration by FRBR + + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + issn + 1877-0509 + 1877-0509 + https://portal.issn.org/resource/ISSN/1877-0509 + part-of + + + eid + 55484808800 + 55484808800 + self + + + http://dx.doi.org/10.1016/j.procs.2014.06.008 + journal-article + + 2014 + + Procedia Computer Science + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 new file mode 100644 index 000000000000..a6649c088643 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 differ diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 97704a14dfef..f6846b955fbb 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + 
cris-2023.02.02 .. diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java index 02aa4ced36b5..f0f37762cc0d 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java @@ -11,7 +11,6 @@ import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Formatter; import java.util.List; import java.util.UUID; @@ -123,58 +122,27 @@ private String adjustQuery(String query) { } /** - * Converts unicode to XML-encoded codepoint. - * @param s query terms - * @return encoded query - */ - private String toXMLEncoded(String s) { - Formatter formatter = new Formatter(); - int len = s.length(); - for (int i = 0; i < len; i = s.offsetByCodePoints(i, 1)) { - int c = s.codePointAt(i); - if (c < 32 || c > 126) { - formatter.format("&#%d;", c); - } else { - formatter.format("%c", c); - } - } - return formatter.toString(); - } - - /** - * Constructs a solr search URL. + * Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2. 
+ * https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2 * * @param query the search terms * @param manifestId the id of the manifest in which to search * @return solr query */ private SolrQuery getSolrQuery(String query, String manifestId) { - boolean encode = configurationService.getBooleanProperty("iiif.search.index.xml.encode"); - if (encode) { - query = toXMLEncoded(query); - } - String snippetCount = configurationService.getProperty("iiif.search.snippets"); - String contextBlock = configurationService.getProperty("iiif.search.contextBlock"); - String limitBlock = configurationService.getProperty("iiif.search.limitBlock"); - String scorePassages = configurationService.getProperty("iiif.search.scorePassages"); - String absoluteHighlights = configurationService.getProperty("iiif.search.absoluteHighlights"); - String contextSize = configurationService.getProperty("iiif.search.contextSize"); - String trackPages = configurationService.getProperty("iiif.search.trackPages"); - String rows = configurationService.getProperty("iiif.search.rows"); SolrQuery solrQuery = new SolrQuery(); solrQuery.set("q", "ocr_text:" + query + " AND manifest_url:\"" + manifestId + "\""); solrQuery.set(CommonParams.WT, "json"); solrQuery.set("fl", "id"); solrQuery.set("hl", "true"); solrQuery.set("hl.ocr.fl", "ocr_text"); - solrQuery.set("rows", rows); - solrQuery.set("hl.ocr.contextBlock", contextBlock); - solrQuery.set("hl.ocr.contextSize", contextSize); - solrQuery.set("hl.snippets", snippetCount); - solrQuery.set("hl.ocr.trackPages", trackPages); - solrQuery.set("hl.ocr.limitBlock",limitBlock); - solrQuery.set("hl.ocr.scorePassages", scorePassages); - solrQuery.set("hl.ocr.absoluteHighlights", absoluteHighlights); + solrQuery.set("hl.ocr.contextBlock", "line"); + solrQuery.set("hl.ocr.contextSize", "2"); + solrQuery.set("hl.snippets", "8192"); + solrQuery.set("hl.ocr.maxPassages", "8192"); + solrQuery.set("hl.ocr.trackPages", "on"); + 
solrQuery.set("hl.ocr.limitBlock","page"); + solrQuery.set("hl.ocr.absoluteHighlights", "true"); return solrQuery; } diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 5c6050d48a4a..a3ab33551c0d 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,14 +8,14 @@ dspace-parent org.dspace - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. ${basedir}/.. - 3.3.0 + 3.4.0 5.87.0.RELEASE @@ -35,24 +35,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - commons-cli @@ -99,6 +81,11 @@ org.codehaus.woodstox wstx-asl + + + com.fasterxml.woodstox + woodstox-core + org.dom4j diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/DataciteDOIItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/DataciteDOIItemCompilePlugin.java new file mode 100644 index 000000000000..5c40465f5908 --- /dev/null +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/DataciteDOIItemCompilePlugin.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xoai.app; + +import java.util.Arrays; +import java.util.List; + +import com.lyncode.xoai.dataprovider.xml.xoai.Element; +import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; +import org.apache.commons.lang.StringUtils; +import org.dspace.content.Item; +import org.dspace.content.integration.crosswalks.virtualfields.ItemDOIService; +import org.dspace.core.Context; +import org.dspace.xoai.util.ItemUtils; +import org.springframework.beans.factory.annotation.Autowired; + + +/** + * XOAIExtensionItemCompilePlugin aims to add structured information about the + * DOIs of the item (if any). + * The xoai document will be enriched with a structure like that + * + * + * + * + * + * + * + * + * ... 
+ * + * + * + * + * + * + */ +public class DataciteDOIItemCompilePlugin implements XOAIExtensionItemCompilePlugin { + + @Autowired + private ItemDOIService itemDOIService; + + @Override + public Metadata additionalMetadata(Context context, Metadata metadata, Item item) { + String primaryDoiValue = itemDOIService.getPrimaryDOIFromItem(item); + String[] alternativeDoiValue = itemDOIService.getAlternativeDOIFromItem(item); + Element datacite = ItemUtils.create("datacite"); + if (StringUtils.isNotBlank(primaryDoiValue)) { + Element primary = ItemUtils.create("primary"); + datacite.getElement().add(primary); + primary.getField().add(ItemUtils.createValue("doi", primaryDoiValue)); + if (alternativeDoiValue != null && alternativeDoiValue.length != 0) { + Element alternative = ItemUtils.create("alternative"); + datacite.getElement().add(alternative); + Arrays.stream(alternativeDoiValue) + .forEach(value -> alternative.getField().add(ItemUtils.createValue("doi", value))); + } + Element other; + List elements = metadata.getElement(); + if (ItemUtils.getElement(elements, "others") != null) { + other = ItemUtils.getElement(elements, "others"); + } else { + other = ItemUtils.create("others"); + } + other.getElement().add(datacite); + } + return metadata; + } + +} diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java index e27a3ee947cb..687dad7cddfa 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java @@ -36,6 +36,7 @@ import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.Options; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrClient; @@ -81,11 +82,10 @@ */ @SuppressWarnings("deprecation") public class XOAI { - private static Logger log = 
LogManager.getLogger(XOAI.class); + private static final Logger log = LogManager.getLogger(XOAI.class); // needed because the solr query only returns 10 rows by default private final Context context; - private boolean optimize; private final boolean verbose; private boolean clean; @@ -104,7 +104,7 @@ public class XOAI { private final static ConfigurationService configurationService = DSpaceServicesFactory.getInstance() .getConfigurationService(); - private List extensionPlugins; + private final List extensionPlugins; private List getFileFormats(Item item) { List formats = new ArrayList<>(); @@ -122,9 +122,8 @@ private List getFileFormats(Item item) { return formats; } - public XOAI(Context context, boolean optimize, boolean clean, boolean verbose) { + public XOAI(Context context, boolean clean, boolean verbose) { this.context = context; - this.optimize = optimize; this.clean = clean; this.verbose = verbose; @@ -151,9 +150,9 @@ private void println(String line) { } public int index() throws DSpaceSolrIndexerException { - int result = 0; - try { + int result; + try { if (clean) { clearIndex(); System.out.println("Using full import."); @@ -169,15 +168,9 @@ public int index() throws DSpaceSolrIndexerException { } else { result = this.index((Date) results.get(0).getFieldValue("item.lastmodified")); } - } - solrServerResolver.getServer().commit(); - if (optimize) { - println("Optimizing Index"); - solrServerResolver.getServer().optimize(); - println("Index optimized"); - } + solrServerResolver.getServer().commit(); // Set last compilation date xoaiLastCompilationCacheService.put(new Date()); @@ -214,7 +207,7 @@ private int index(Date last) throws DSpaceSolrIndexerException, IOException { * @param last maximum date for an item to be considered for an update * @return Iterator over list of items which might have changed their visibility * since the last update. 
- * @throws DSpaceSolrIndexerException + * @throws DSpaceSolrIndexerException e */ private Iterator getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException { try { @@ -365,7 +358,7 @@ private int index(Iterator iterator) throws DSpaceSolrIndexerException { * * @param item Item * @return date - * @throws SQLException + * @throws SQLException e */ private Date getMostRecentModificationDate(Item item) throws SQLException { List dates = new LinkedList<>(); @@ -398,8 +391,12 @@ private SolrInputDocument index(Item item) SolrInputDocument doc = new SolrInputDocument(); doc.addField("item.id", item.getID().toString()); + String legacyOaiId = itemService.getMetadataFirstValue(item, "dspace", "legacy", "oai-identifier", Item.ANY); String handle = item.getHandle(); - doc.addField("item.handle", handle); + doc.addField("item.handle", item.getHandle()); + if (StringUtils.isNotEmpty(legacyOaiId)) { + doc.addField("item.legacyoaiidentifier", legacyOaiId.split(":")[2]); + } boolean isEmbargoed = !this.isPublic(item); boolean isCurrentlyVisible = this.checkIfVisibleInOAI(item); @@ -418,7 +415,7 @@ private SolrInputDocument index(Item item) * future will be marked as such. */ - boolean isPublic = isEmbargoed ? (isIndexed ? isCurrentlyVisible : false) : true; + boolean isPublic = !isEmbargoed || (isIndexed && isCurrentlyVisible); doc.addField("item.public", isPublic); // if the visibility of the item will change in the future due to an @@ -433,8 +430,7 @@ private SolrInputDocument index(Item item) * because this will override the item.public flag. */ - doc.addField("item.deleted", - (item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? 
isPublic : false))); + doc.addField("item.deleted", (item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed && isPublic))); /* * An item that is embargoed will potentially not be harvested by incremental @@ -574,8 +570,8 @@ private static void cleanCache(XOAIItemCacheService xoaiItemCacheService, XOAICa public static void main(String[] argv) throws IOException, ConfigurationException { - AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext( - new Class[] { BasicConfiguration.class }); + AnnotationConfigApplicationContext applicationContext = + new AnnotationConfigApplicationContext(BasicConfiguration.class); XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class); XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class); @@ -586,7 +582,6 @@ public static void main(String[] argv) throws IOException, ConfigurationExceptio CommandLineParser parser = new DefaultParser(); Options options = new Options(); options.addOption("c", "clear", false, "Clear index before indexing"); - options.addOption("o", "optimize", false, "Optimize index at the end"); options.addOption("v", "verbose", false, "Verbose output"); options.addOption("h", "help", false, "Shows some help"); options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE"); @@ -596,10 +591,9 @@ public static void main(String[] argv) throws IOException, ConfigurationExceptio String[] validDatabaseCommands = { COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS, COMMAND_ERASE_COMPILED_ITEMS }; - boolean solr = true; // Assuming solr by default - solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); - + boolean solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); boolean run = false; + if (line.getArgs().length > 0) { if (solr) { if (Arrays.asList(validSolrCommands).contains(line.getArgs()[0])) { @@ -620,7 +614,7 @@ public static void main(String[] 
argv) throws IOException, ConfigurationExceptio if (COMMAND_IMPORT.equals(command)) { ctx = new Context(Context.Mode.READ_ONLY); - XOAI indexer = new XOAI(ctx, line.hasOption('o'), line.hasOption('c'), line.hasOption('v')); + XOAI indexer = new XOAI(ctx, line.hasOption('c'), line.hasOption('v')); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); @@ -706,7 +700,6 @@ private static void usage() { System.out.println(" " + COMMAND_IMPORT + " - To import DSpace items into OAI index and cache system"); System.out.println(" " + COMMAND_CLEAN_CACHE + " - Cleans the OAI cached responses"); System.out.println("> Parameters:"); - System.out.println(" -o Optimize index after indexing (" + COMMAND_IMPORT + " only)"); System.out.println(" -c Clear index (" + COMMAND_IMPORT + " only)"); System.out.println(" -v Verbose output"); System.out.println(" -h Shows this text"); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java new file mode 100644 index 000000000000..1571dea8b880 --- /dev/null +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xoai.app.plugins; + +import java.sql.SQLException; +import java.util.List; + +import com.lyncode.xoai.dataprovider.xml.xoai.Element; +import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; +import org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin; +import 
org.dspace.xoai.util.ItemUtils; + +/** + * AccessStatusElementItemCompilePlugin aims to add structured information about the + * Access Status of the item (if any). + + * The xoai document will be enriched with a structure like that + *

      + * {@code
      + *   
      + *       
      + *          open.access
      + *       
      + *   
      + *   OR
      + *   
      + *       
      + *          embargo
      + *          2024-10-10
      + *       
      + *   
      + * }
      + * 
      + * Returning Values are based on: + * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper + */ +public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin { + + @Override + public Metadata additionalMetadata(Context context, Metadata metadata, Item item) { + AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + try { + String accessStatusType; + accessStatusType = accessStatusService.getAccessStatus(context, item); + + Element accessStatus = ItemUtils.create("access-status"); + accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); + + Element others; + List elements = metadata.getElement(); + if (ItemUtils.getElement(elements, "others") != null) { + others = ItemUtils.getElement(elements, "others"); + } else { + others = ItemUtils.create("others"); + } + others.getElement().add(accessStatus); + + } catch (SQLException e) { + e.printStackTrace(); + } + + return metadata; + } + +} diff --git a/dspace-oai/src/main/java/org/dspace/xoai/data/DSpaceItem.java b/dspace-oai/src/main/java/org/dspace/xoai/data/DSpaceItem.java index d1f78ee67152..b179229987da 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/data/DSpaceItem.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/data/DSpaceItem.java @@ -105,7 +105,8 @@ public List getAbout() { @Override public String getIdentifier() { - return buildIdentifier(getHandle()); + return !getMetadata("dspace","legacy", "oai-identifier").isEmpty() ? 
+ getMetadata("dspace","legacy", "oai-identifier").get(0) : buildIdentifier(getHandle()); } private static class MetadataNamePredicate implements Predicate { diff --git a/dspace-oai/src/main/java/org/dspace/xoai/filter/DSpaceAuthorizationFilter.java b/dspace-oai/src/main/java/org/dspace/xoai/filter/DSpaceAuthorizationFilter.java index ebb19c84b5e2..baf8552a6029 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/filter/DSpaceAuthorizationFilter.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/filter/DSpaceAuthorizationFilter.java @@ -9,12 +9,17 @@ package org.dspace.xoai.filter; import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; @@ -33,6 +38,9 @@ public class DSpaceAuthorizationFilter extends DSpaceFilter { private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + private static final ItemService itemService + = ContentServiceFactory.getInstance().getItemService(); + @Override public boolean isShown(DSpaceItem item) { boolean pub = false; @@ -43,6 +51,11 @@ public boolean isShown(DSpaceItem item) { return false; } Item dspaceItem = (Item) handleService.resolveToObject(context, handle); + + if (dspaceItem == null) { + dspaceItem = fromLegacyIdentifier(item); + } + if (dspaceItem == null) { return false; } @@ -55,6 +68,25 @@ public boolean isShown(DSpaceItem item) { return pub; } + private Item fromLegacyIdentifier(DSpaceItem item) { + List legacyIdentifier = 
item.getMetadata("dspace.legacy.oai-identifier"); + if (legacyIdentifier.isEmpty()) { + return null; + } + try { + Iterator + iterator = itemService.findUnfilteredByMetadataField( + context, "dspace", "legacy", "oai-identifier", + legacyIdentifier.get(0)); + if (!iterator.hasNext()) { + return null; + } + return iterator.next(); + } catch (AuthorizeException | SQLException e) { + throw new RuntimeException(e); + } + } + @Override public SolrFilterResult buildSolrQuery() { return new SolrFilterResult("item.public:true"); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java index e67e9c56bd7a..83c4486f7134 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java @@ -12,7 +12,7 @@ import java.io.IOException; import java.io.InputStream; import javax.xml.transform.Source; -import javax.xml.transform.Transformer; +import javax.xml.transform.Templates; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamSource; @@ -40,8 +40,7 @@ public InputStream getResource(String path) throws IOException { } @Override - public Transformer getTransformer(String path) throws IOException, - TransformerConfigurationException { + public Templates getTemplates(String path) throws IOException, TransformerConfigurationException { // construct a Source that reads from an InputStream Source mySrc = new StreamSource(getResource(path)); // specify a system ID (the path to the XSLT-file on the filesystem) @@ -49,6 +48,6 @@ public Transformer getTransformer(String path) throws IOException, // XSLT-files (like ) String systemId = basePath + "/" + path; mySrc.setSystemId(systemId); - return 
transformerFactory.newTransformer(mySrc); + return transformerFactory.newTemplates(mySrc); } } diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/xoai/DSpaceItemSolrRepository.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/xoai/DSpaceItemSolrRepository.java index 281095e01aab..fc08fb6fb413 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/xoai/DSpaceItemSolrRepository.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/xoai/DSpaceItemSolrRepository.java @@ -59,8 +59,13 @@ public Item getItem(String identifier) throws IdDoesNotExistException { String parts[] = identifier.split(Pattern.quote(":")); if (parts.length == 3) { try { - SolrQuery params = new SolrQuery("item.handle:" + parts[2]); - return new DSpaceSolrItem(DSpaceSolrSearch.querySingle(server, params)); + try { + SolrQuery params = new SolrQuery("item.legacyoaiidentifier:" + parts[2]); + return new DSpaceSolrItem(DSpaceSolrSearch.querySingle(server, params)); + } catch (SolrSearchEmptyException ex) { + SolrQuery altParams = new SolrQuery("item.handle:" + parts[2]); + return new DSpaceSolrItem(DSpaceSolrSearch.querySingle(server, altParams)); + } } catch (SolrSearchEmptyException | IOException ex) { throw new IdDoesNotExistException(ex); } diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 1dbb5dda00f3..cc06610fbb96 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -21,6 +21,8 @@ import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.service.MetadataExposureService; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; @@ 
-65,6 +67,9 @@ public class ItemUtils { private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final AuthorizeService authorizeService + = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + private static final MetadataAuthorityService mam = ContentAuthorityServiceFactory .getInstance().getMetadataAuthorityService(); @@ -206,6 +211,11 @@ private static Element createBundlesElement(Context context, Item item) throws S bundle.getElement().add(bitstreams); List bits = b.getBitstreams(); for (Bitstream bit : bits) { + // Check if bitstream is null and log the error + if (bit == null) { + log.error("Null bitstream found, check item uuid: " + item.getID()); + break; + } Element bitstream = create("bitstream"); bitstreams.getElement().add(bitstream); String url = ""; @@ -261,13 +271,17 @@ private static Element createLicenseElement(Context context, Item item) List licBits = licBundle.getBitstreams(); if (!licBits.isEmpty()) { Bitstream licBit = licBits.get(0); - InputStream in; - - in = bitstreamService.retrieve(context, licBit); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - Utils.bufferedCopy(in, out); - license.getField().add(createValue("bin", Base64Utils.encode(out.toString()))); - + if (authorizeService.authorizeActionBoolean(context, licBit, Constants.READ)) { + InputStream in; + + in = bitstreamService.retrieve(context, licBit); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + Utils.bufferedCopy(in, out); + license.getField().add(createValue("bin", Base64Utils.encode(out.toString()))); + } else { + log.info("Missing READ rights for license bitstream. 
Did not include license bitstream for item: " + + item.getID() + "."); + } } } return license; diff --git a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java index de76c992458c..0f48824159c2 100644 --- a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java +++ b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java @@ -29,7 +29,7 @@ public void pipelineTest() throws Exception { InputStream input = PipelineTest.class.getClassLoader().getResourceAsStream("item.xml"); InputStream xslt = PipelineTest.class.getClassLoader().getResourceAsStream("oai_dc.xsl"); String output = FileUtils.readAllText(new XSLPipeline(input, true) - .apply(factory.newTransformer(new StreamSource(xslt))) + .apply(factory.newTemplates(new StreamSource(xslt))) .getTransformed()); assertThat(output, oai_dc().withXPath("/oai_dc:dc/dc:title", equalTo("Teste"))); diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index c6d887b773c0..83af00bc343f 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index 6a5945560682..a77c960283e7 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. 
diff --git a/dspace-server-webapp/README.md b/dspace-server-webapp/README.md index 8d3853e8ccc7..d418124ea171 100644 --- a/dspace-server-webapp/README.md +++ b/dspace-server-webapp/README.md @@ -10,7 +10,7 @@ This webapp uses the following technologies: We don't use Spring Data REST as we haven't a spring data layer and we want to provide clear separation between the persistence representation and the REST representation ## How to contribute -Check the infomation available on the DSpace Official Wiki page for the [DSpace 7 Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) +Check the information available on the DSpace Official Wiki page for the [DSpace 7 Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) [DSpace 7 REST: Coding DSpace Objects](https://wiki.duraspace.org/display/DSPACE/DSpace+7+REST%3A+Coding+DSpace+Objects) diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 5ad15c1097b5..a26174341efe 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -2,7 +2,6 @@ 4.0.0 org.dspace dspace-server-webapp - war DSpace Server Webapp DSpace Server Webapp (Spring Boot) @@ -15,7 +14,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. 
@@ -25,23 +24,82 @@ @ - - org.dspace.app.rest.Application - org.apache.maven.plugins - maven-war-plugin - - true - - true - + org.codehaus.mojo + properties-maven-plugin + 1.1.0 + + + initialize + + read-project-properties + + + + ${root.basedir}/dspace/config/dspace.cfg + ${root.basedir}/dspace/config/local.cfg + + true + + + + + + maven-resources-plugin - prepare-package + testEnvironment + process-resources + + testResources + + + + + ${basedir}/src/test/resources + + + + + + webappFiltering + process-resources + + resources + + + + + ${basedir}/src/main/resources + + **/*application*.properties + **/*dspace*.properties + + true + + + ${basedir}/src/main/resources + + **/*application*.properties + **/*dspace*.properties + + + **/*.properties + + + + ${basedir}/src/main/resources + + **/static/** + **/spring/** + + + + @@ -66,11 +124,11 @@ **/src/test/resources/** **/src/test/data/** - src/main/webapp/index.html - src/main/webapp/login.html - src/main/webapp/styles.css - src/main/webapp/js/hal/** - src/main/webapp/js/vendor/** + src/main/resources/static/index.html + src/main/resources/static/login.html + src/main/resources/static/styles.css + src/main/resources/static/js/hal/** + src/main/resources/static/js/vendor/** @@ -275,7 +333,7 @@ - + addon-analytics @@ -291,7 +349,24 @@ jar - + + + + addon-dataquality + + + dq.on + + + + + it.4science.dspace + addon-dataquality + ${addon-dataquality.version} + jar + + + @@ -321,12 +396,6 @@ - - org.springframework.boot - spring-boot-starter-tomcat - provided - ${spring-boot.version} - org.springframework.boot @@ -356,7 +425,7 @@ com.flipkart.zjsonpatch zjsonpatch - 0.4.6 + 0.4.14 @@ -376,7 +445,7 @@ org.webjars.bowergithub.jquery jquery-dist - 3.6.0 + 3.7.0 @@ -390,7 +459,7 @@ org.webjars.bowergithub.medialize uri.js - 1.19.10 + 1.19.11 @@ -562,12 +631,10 @@ com.jayway.jsonpath json-path - test com.jayway.jsonpath json-path-assert - test junit @@ -615,7 +682,7 @@ org.exparity hamcrest-date - 2.0.7 + 2.0.8 test diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java new file mode 100644 index 000000000000..aa511bcb9282 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; + +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import com.fasterxml.jackson.databind.JsonNode; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.repository.BitstreamRestRepository; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * REST controller for handling bulk updates to Bitstream resources. + *

      + * This controller is responsible for handling requests to the bitstream category, which allows for updating + * multiple bitstream resources in a single operation. + *

      + * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@RestController +@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME) +public class BitstreamCategoryRestController { + @Autowired + BitstreamRestRepository bitstreamRestRepository; + + /** + * Handles PATCH requests to the bitstream category for bulk updates of bitstream resources. + * + * @param request the HTTP request object. + * @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied. + * @return a ResponseEntity representing the HTTP response to be sent back to the client, in this case, a + * HTTP 204 No Content response since currently only a delete operation is supported. + * @throws SQLException if an error occurs while accessing the database. + * @throws AuthorizeException if the user is not authorized to perform the requested operation. + */ + @RequestMapping(method = RequestMethod.PATCH) + public ResponseEntity> patch(HttpServletRequest request, + @RequestBody(required = true) JsonNode jsonNode) + throws SQLException, AuthorizeException { + Context context = obtainContext(request); + bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode); + return ResponseEntity.noContent().build(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java index 8f76885b98e3..38588ed1f4ba 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java @@ -17,6 +17,7 @@ import java.util.UUID; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.Response; import org.apache.catalina.connector.ClientAbortException; import org.apache.commons.lang3.StringUtils; @@ -149,8 +150,11 @@ public ResponseEntity 
retrieve(@PathVariable UUID uuid, HttpServletResponse resp } //Determine if we need to send the file as a download or if the browser can open it inline + //The file will be downloaded if its size is larger than the configured threshold, + //or if its mimetype/extension appears in the "webui.content_disposition_format" config long dispositionThreshold = configurationService.getLongProperty("webui.content_disposition_threshold"); - if (dispositionThreshold >= 0 && filesize > dispositionThreshold) { + if ((dispositionThreshold >= 0 && filesize > dispositionThreshold) + || checkFormatForContentDisposition(format)) { httpHeadersInitializer.withDisposition(HttpHeadersInitializer.CONTENT_DISPOSITION_ATTACHMENT); } @@ -191,6 +195,30 @@ private String getBitstreamName(Bitstream bit, BitstreamFormat format) { return name; } + private boolean isNotAnErrorResponse(HttpServletResponse response) { + Response.Status.Family responseCode = Response.Status.Family.familyOf(response.getStatus()); + return responseCode.equals(Response.Status.Family.SUCCESSFUL) + || responseCode.equals(Response.Status.Family.REDIRECTION); + } + + private boolean checkFormatForContentDisposition(BitstreamFormat format) { + // never automatically download undefined formats + if (format == null) { + return false; + } + List formats = List.of((configurationService.getArrayProperty("webui.content_disposition_format"))); + boolean download = formats.contains(format.getMIMEType()); + if (!download) { + for (String ext : format.getExtensions()) { + if (formats.contains(ext)) { + download = true; + break; + } + } + } + return download; + } + /** * This method will update the bitstream format of the bitstream that corresponds to the provided bitstream uuid. 
* diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/EPersonGroupRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/EPersonGroupRestController.java index 8e098d28d2e7..ec101bf6d7f8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/EPersonGroupRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/EPersonGroupRestController.java @@ -42,6 +42,18 @@ public class EPersonGroupRestController implements InitializingBean { private ConverterService converter; @Autowired private CollectionRestRepository collectionRestRepository; + + /** + * This request can be used to join a user to a target group by using a registration data token will be replaced + * by the {@link EPersonRegistrationRestController} features. + * + * @param context + * @param uuid + * @param token + * @return + * @throws Exception + */ + @Deprecated @RequestMapping(method = RequestMethod.POST, value = EPersonRest.CATEGORY + "/" + EPersonRest.PLURAL_NAME + "/{uuid}/" + EPersonRest.GROUPS) public ResponseEntity> joinUserToGroups(Context context, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/EPersonRegistrationRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/EPersonRegistrationRestController.java new file mode 100644 index 000000000000..87b402df9d22 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/EPersonRegistrationRestController.java @@ -0,0 +1,86 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import java.util.List; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; +import javax.validation.constraints.NotNull; + +import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.model.EPersonRest; 
+import org.dspace.app.rest.model.hateoas.EPersonResource; +import org.dspace.app.rest.repository.EPersonRestRepository; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +@RestController +@RequestMapping("/api/" + EPersonRest.CATEGORY + "/" + EPersonRest.PLURAL_NAME) +public class EPersonRegistrationRestController { + + @Autowired + private EPersonRestRepository ePersonRestRepository; + + @Autowired + private ConverterService converter; + + /** + * This method will merge the data coming from a {@link org.dspace.eperson.RegistrationData} into the current + * logged-in user. + *
      + * The request must have an empty body, and a token parameter should be provided: + *
      +     *  
      +     *   curl -X POST http://${dspace.url}/api/eperson/epersons/${id-eperson}?token=${token}&override=${metadata-fields}
      +     *        -H "Content-Type: application/json"
      +     *        -H "Authorization: Bearer ${bearer-token}"
      +     *  
      +     * 
      + * @param request httpServletRequest incoming + * @param uuid uuid of the eperson + * @param token registration token + * @param override fields to override inside from the registration data to the eperson + * @return + * @throws Exception + */ + @RequestMapping(method = RequestMethod.POST, value = "/{uuid}") + public ResponseEntity> post( + HttpServletRequest request, + @PathVariable String uuid, + @RequestParam @NotNull String token, + @RequestParam(required = false) List override + ) throws Exception { + Context context = ContextUtil.obtainContext(request); + try { + context.turnOffAuthorisationSystem(); + EPersonRest epersonRest = + ePersonRestRepository.mergeFromRegistrationData(context, UUID.fromString(uuid), token, override); + EPersonResource resource = converter.toResource(epersonRest); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), resource); + } catch (Exception e) { + throw e; + } finally { + context.restoreAuthSystemState(); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java index b06360ee1dc2..b5a0c957f265 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java @@ -39,6 +39,7 @@ import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; /** @@ -69,6 +70,8 @@ public class ItemOwningCollectionUpdateRestController { * moving the item to the new collection. 
* * @param uuid The UUID of the item that will be moved + * @param inheritCollectionPolicies Boolean flag whether to inherit the target collection policies when + * moving the item * @param response The response object * @param request The request object * @return The wrapped resource containing the new owning collection or null when the item was not moved @@ -79,7 +82,10 @@ public class ItemOwningCollectionUpdateRestController { @RequestMapping(method = RequestMethod.PUT, consumes = {"text/uri-list"}) @PreAuthorize("hasPermission(#uuid, 'ITEM','WRITE')") @PostAuthorize("returnObject != null") - public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response, + public CollectionRest move(@PathVariable UUID uuid, + @RequestParam(name = "inheritPolicies", defaultValue = "false") + Boolean inheritCollectionPolicies, + HttpServletResponse response, HttpServletRequest request) throws SQLException, IOException, AuthorizeException { Context context = ContextUtil.obtainContext(request); @@ -91,7 +97,8 @@ public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response "or the data cannot be resolved to a collection."); } - Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0)); + Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0), + inheritCollectionPolicies); if (targetCollection == null) { return null; @@ -107,17 +114,19 @@ public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response * @param item The item to be moved * @param currentCollection The current owning collection of the item * @param targetCollection The target collection of the item + * @param inheritPolicies Boolean flag whether to inherit the target collection policies when moving the item * @return The target collection * @throws SQLException If something goes wrong * @throws IOException If something goes wrong * @throws AuthorizeException If the user is not authorized to perform 
the move action */ private Collection moveItem(final Context context, final Item item, final Collection currentCollection, - final Collection targetCollection) + final Collection targetCollection, + final boolean inheritPolicies) throws SQLException, IOException, AuthorizeException { - itemService.move(context, item, currentCollection, targetCollection); - //Necessary because Controller does not pass through general RestResourceController, and as such does not do its - // commit in DSpaceRestRepository.createAndReturn() or similar + itemService.move(context, item, currentCollection, targetCollection, inheritPolicies); + // Necessary because Controller does not pass through general RestResourceController, and as such does not do + // its commit in DSpaceRestRepository.createAndReturn() or similar context.commit(); return context.reloadEntity(targetCollection); @@ -129,12 +138,14 @@ private Collection moveItem(final Context context, final Item item, final Collec * @param context The context Object * @param itemUuid The uuid of the item to be moved * @param targetCollection The target collection + * @param inheritPolicies Whether to inherit the target collection policies when moving the item * @return The new owning collection of the item when authorized or null when not authorized * @throws SQLException If something goes wrong * @throws IOException If something goes wrong * @throws AuthorizeException If the user is not authorized to perform the move action */ - private Collection performItemMove(final Context context, final UUID itemUuid, final Collection targetCollection) + private Collection performItemMove(final Context context, final UUID itemUuid, final Collection targetCollection, + boolean inheritPolicies) throws SQLException, IOException, AuthorizeException { Item item = itemService.find(context, itemUuid); @@ -153,7 +164,7 @@ private Collection performItemMove(final Context context, final UUID itemUuid, f if (authorizeService.authorizeActionBoolean(context, 
currentCollection, Constants.ADMIN)) { - return moveItem(context, item, currentCollection, targetCollection); + return moveItem(context, item, currentCollection, targetCollection, inheritPolicies); } return null; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java index e297dab44cad..a6dbf3496e49 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java @@ -120,7 +120,7 @@ public TemplateItemResource getTemplateItem(HttpServletRequest request, @PathVar * @throws SQLException * @throws AuthorizeException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'WRITE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'WRITE')") @RequestMapping(method = RequestMethod.PATCH) public ResponseEntity> patch(HttpServletRequest request, @PathVariable UUID uuid, @RequestBody(required = true) JsonNode jsonNode) @@ -153,7 +153,7 @@ public ResponseEntity> patch(HttpServletRequest request, * @throws AuthorizeException * @throws IOException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'DELETE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'DELETE')") @RequestMapping(method = RequestMethod.DELETE) public ResponseEntity> deleteTemplateItem(HttpServletRequest request, @PathVariable UUID uuid) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 6b41efb9b5f5..b7f94e379685 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -176,7 +176,7 @@ public void search(HttpServletRequest request, if (dsoObject != null) { container = 
scopeResolver.resolveScope(context, dsoObject); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso("site", container); + .getDiscoveryConfiguration(context, container); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() .toArray( diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java new file mode 100644 index 000000000000..c236954dab48 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java @@ -0,0 +1,135 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; +import static org.dspace.core.Constants.BITSTREAM; + +import java.util.List; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.model.BundleRest; +import org.dspace.app.rest.model.hateoas.BundleResource; +import org.dspace.app.rest.repository.BundlePrimaryBitstreamLinkRepository; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.rest.utils.Utils; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.HttpHeaders; 
+import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * This RestController is responsible for managing primaryBitstreams on bundles. + * The endpoint can be found at /api/core/bundles/{bundle-uuid}/primaryBitstream + */ +@RestController +@RequestMapping("/api/" + BundleRest.CATEGORY + "/" + BundleRest.PLURAL_NAME + + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/" + BundleRest.PRIMARY_BITSTREAM) +public class PrimaryBitstreamController { + + @Autowired + private BundlePrimaryBitstreamLinkRepository repository; + @Autowired + private ConverterService converter; + @Autowired + private Utils utils; + + /** + * This method creates a primaryBitstream on the given Bundle. + *
      + * curl -i -X POST "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * -H "Content-type:text/uri-list" + * -d "https://{dspace.server.url}/api/core/bitstreams/{bitstream-uuid}" + * + * + * @param uuid The UUID of the Bundle on which the primaryBitstream will be set + * @param request The HttpServletRequest + * @return The Bundle on which the primaryBitstream was set + */ + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'WRITE')") + @RequestMapping(method = RequestMethod.POST, consumes = {"text/uri-list"}) + public ResponseEntity> createPrimaryBitstream(@PathVariable UUID uuid, + HttpServletRequest request) { + Context context = ContextUtil.obtainContext(request); + BundleRest bundleRest = repository.createPrimaryBitstream(context, uuid, + getBitstreamFromRequest(context, request), + utils.obtainProjection()); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), + (RepresentationModel) converter.toResource(bundleRest)); + } + + /** + * This method updates the primaryBitstream on the given Bundle. + *
      + * curl -i -X PUT "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * -H "Content-type:text/uri-list" + * -d "https://{dspace.server.url}/api/core/bitstreams/{bitstream-uuid}" + * + * + * @param uuid The UUID of the Bundle of which the primaryBitstream will be updated + * @param request The HttpServletRequest + * @return The Bundle of which the primaryBitstream was updated + */ + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'WRITE')") + @RequestMapping(method = RequestMethod.PUT, consumes = {"text/uri-list"}) + public BundleResource updatePrimaryBitstream(@PathVariable UUID uuid, + HttpServletRequest request) { + Context context = ContextUtil.obtainContext(request); + BundleRest bundleRest = repository.updatePrimaryBitstream(context, uuid, + getBitstreamFromRequest(context, request), + utils.obtainProjection()); + return converter.toResource(bundleRest); + } + + /** + * This method deletes the primaryBitstream on the given Bundle. + *
      + * curl -i -X DELETE "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * + * + * @param uuid The UUID of the Bundle of which the primaryBitstream will be deleted + * @param request The HttpServletRequest + * @return The Bundle of which the primaryBitstream was deleted + */ + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'WRITE')") + @RequestMapping(method = RequestMethod.DELETE) + public ResponseEntity> deletePrimaryBitstream(@PathVariable UUID uuid, + HttpServletRequest request) { + Context context = ContextUtil.obtainContext(request); + repository.deletePrimaryBitstream(context, uuid); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + + /** + * This method parses a URI from the request body and resolves it to a Bitstream. + * + * @param context The current DSpace context + * @param request The HttpServletRequest + * @return The resolved Bitstream + */ + private Bitstream getBitstreamFromRequest(Context context, HttpServletRequest request) { + List dsoList = utils.constructDSpaceObjectList(context, utils.getStringListFromRequest(request)); + if (dsoList.size() != 1 || dsoList.get(0).getType() != BITSTREAM) { + throw new UnprocessableEntityException("URI does not resolve to an existing bitstream."); + } + return (Bitstream) dsoList.get(0); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyEPersonReplaceRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyEPersonReplaceRestController.java index e772aa0abe18..b02869962156 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyEPersonReplaceRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyEPersonReplaceRestController.java @@ -23,7 +23,9 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.ResourcePolicyService; +import 
org.dspace.content.Bitstream; import org.dspace.content.DSpaceObject; +import org.dspace.content.service.BitstreamService; import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.springframework.beans.factory.annotation.Autowired; @@ -51,6 +53,8 @@ public class ResourcePolicyEPersonReplaceRestController { private Utils utils; @Autowired private ResourcePolicyService resourcePolicyService; + @Autowired + private BitstreamService bitstreamService; @PreAuthorize("hasPermission(#id, 'resourcepolicy', 'ADMIN')") @RequestMapping(method = PUT, consumes = {"text/uri-list"}) @@ -75,6 +79,11 @@ public ResponseEntity> replaceEPersonOfResourcePolicy(@Pa } EPerson newEPerson = (EPerson) dsoList.get(0); resourcePolicy.setEPerson(newEPerson); + + if (bitstreamService.isOriginalBitstream(resourcePolicy.getdSpaceObject())) { + bitstreamService.updateThumbnailResourcePolicies(context, (Bitstream) resourcePolicy.getdSpaceObject()); + } + context.commit(); return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyGroupReplaceRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyGroupReplaceRestController.java index e9ba0dff4429..40a82068dbce 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyGroupReplaceRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyGroupReplaceRestController.java @@ -23,7 +23,9 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; import org.dspace.content.DSpaceObject; +import org.dspace.content.service.BitstreamService; import org.dspace.core.Context; import org.dspace.eperson.Group; import org.springframework.beans.factory.annotation.Autowired; @@ -51,6 +53,8 @@ public class 
ResourcePolicyGroupReplaceRestController { private Utils utils; @Autowired private ResourcePolicyService resourcePolicyService; + @Autowired + private BitstreamService bitstreamService; @PreAuthorize("hasPermission(#id, 'resourcepolicy', 'ADMIN')") @RequestMapping(method = PUT, consumes = {"text/uri-list"}) @@ -75,6 +79,11 @@ public ResponseEntity> replaceGroupOfResourcePolicy(@Path Group newGroup = (Group) dsoList.get(0); resourcePolicy.setGroup(newGroup); + + if (bitstreamService.isOriginalBitstream(resourcePolicy.getdSpaceObject())) { + bitstreamService.updateThumbnailResourcePolicies(context, (Bitstream) resourcePolicy.getdSpaceObject()); + } + context.commit(); return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 4802580579a7..a8488b56975a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -12,18 +12,23 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.model.ProcessRest; import org.dspace.app.rest.model.ScriptRest; import org.dspace.app.rest.model.hateoas.ProcessResource; import org.dspace.app.rest.repository.ScriptRestRepository; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; +import 
org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.PathVariable; @@ -48,6 +53,9 @@ public class ScriptProcessesController { @Autowired private ScriptRestRepository scriptRestRepository; + @Autowired + private ScriptService scriptService; + @Autowired private RequestService requestService; @@ -59,10 +67,12 @@ public class ScriptProcessesController { * @return The ProcessResource object for the created process * @throws Exception If something goes wrong */ - @RequestMapping(method = RequestMethod.POST) + @RequestMapping(method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE) @PreAuthorize("permitAll()") - public ResponseEntity> startProcess(@PathVariable(name = "name") String scriptName, - @RequestParam(name = "file", required = false) List files) throws Exception { + public ResponseEntity> startProcess( + @PathVariable(name = "name") String scriptName, + @RequestParam(name = "file", required = false) List files) + throws Exception { if (log.isTraceEnabled()) { log.trace("Starting Process for Script with name: " + scriptName); } @@ -73,4 +83,21 @@ public ResponseEntity> startProcess(@PathVariable(name = return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource); } + @RequestMapping(method = RequestMethod.POST, consumes = "!" 
+ MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") + public ResponseEntity> startProcessInvalidMimeType( + @PathVariable(name = "name") String scriptName) + throws Exception { + if (log.isTraceEnabled()) { + log.trace("Starting Process for Script with name: " + scriptName); + } + Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getHttpServletRequest()); + ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + + if (scriptToExecute == null) { + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); + } + throw new DSpaceBadRequestException("Invalid mimetype"); + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WebApplication.java similarity index 73% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java rename to dspace-server-webapp/src/main/java/org/dspace/app/rest/WebApplication.java index c85efed835bf..684cbbdc1d7a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WebApplication.java @@ -12,13 +12,12 @@ import java.util.List; import javax.servlet.Filter; +import org.apache.commons.lang3.ArrayUtils; import org.dspace.app.rest.filter.DSpaceRequestContextFilter; import org.dspace.app.rest.model.hateoas.DSpaceLinkRelationProvider; import org.dspace.app.rest.parameter.resolver.SearchFilterResolver; import org.dspace.app.rest.utils.ApplicationConfig; import org.dspace.app.rest.utils.DSpaceAPIRequestLoggingFilter; -import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; -import org.dspace.app.rest.utils.DSpaceKernelInitializer; import org.dspace.app.sitemap.GenerateSitemaps; import org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCli; import org.dspace.app.util.DSpaceContextListener; @@ -27,11 +26,9 
@@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; import org.springframework.cache.annotation.EnableCaching; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.core.annotation.Order; import org.springframework.hateoas.server.LinkRelationProvider; import org.springframework.lang.NonNull; @@ -47,24 +44,18 @@ import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; /** - * Define the Spring Boot Application settings itself. This class takes the place - * of a web.xml file, and configures all Filters/Listeners as methods (see below). - *

      - * NOTE: Requires a Servlet 3.0 container, e.g. Tomcat 7.0 or above. - *

      - * NOTE: This extends SpringBootServletInitializer in order to allow us to build - * a deployable WAR file with Spring Boot. See: - * http://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#howto-create-a-deployable-war-file + * Main configuration for the dspace web module. * * @author Andrea Bollini (andrea.bollini at 4science.it) * @author Tim Donohue + * @author Luca Giamminonni (luca.giamminonni at 4science.it) */ -@SpringBootApplication @EnableScheduling @EnableCaching -public class Application extends SpringBootServletInitializer { +@Configuration +public class WebApplication { - private static final Logger log = LoggerFactory.getLogger(Application.class); + private static final Logger log = LoggerFactory.getLogger(WebApplication.class); @Autowired private ApplicationConfig configuration; @@ -87,26 +78,6 @@ public void sendGoogleAnalyticsEvents() { googleAsyncEventListener.sendCollectedEvents(); } - /** - * Override the default SpringBootServletInitializer.configure() method, - * passing it this Application class. - *

      - * This is necessary to allow us to build a deployable WAR, rather than - * always relying on embedded Tomcat. - *

      - * See: http://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#howto-create-a-deployable-war-file - * - * @param application - * @return - */ - @Override - protected SpringApplicationBuilder configure(SpringApplicationBuilder application) { - // Pass this Application class, and our initializers for DSpace Kernel and Configuration - // NOTE: Kernel must be initialized before Configuration - return application.sources(Application.class) - .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()); - } - /** * Register the "DSpaceContextListener" so that it is loaded * for this Application. @@ -187,9 +158,31 @@ public void addCorsMappings(@NonNull CorsRegistry registry) { .getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig()); String[] iiifAllowedOrigins = configuration .getCorsAllowedOrigins(configuration.getIiifAllowedOriginsConfig()); + String[] bitstreamAllowedOrigins = configuration + .getCorsAllowedOrigins(configuration.getBitstreamAllowedOriginsConfig()); + String[] signpostingAllowedOrigins = configuration + .getCorsAllowedOrigins(configuration.getSignpostingAllowedOriginsConfig()); boolean corsAllowCredentials = configuration.getCorsAllowCredentials(); boolean iiifAllowCredentials = configuration.getIiifAllowCredentials(); + boolean bitstreamAllowCredentials = configuration.getBitstreamsAllowCredentials(); + boolean signpostingAllowCredentials = configuration.getSignpostingAllowCredentials(); + + if (ArrayUtils.isEmpty(bitstreamAllowedOrigins)) { + bitstreamAllowedOrigins = corsAllowedOrigins; + } + if (!ArrayUtils.isEmpty(bitstreamAllowedOrigins)) { + registry.addMapping("/api/core/bitstreams/**").allowedMethods(CorsConfiguration.ALL) + // Set Access-Control-Allow-Credentials to "true" and specify which origins are valid + // for our Access-Control-Allow-Origin header + .allowCredentials(bitstreamAllowCredentials).allowedOrigins(bitstreamAllowedOrigins) + // Allow list of request preflight headers 
allowed to be sent to us from the client + .allowedHeaders("Accept", "Authorization", "Content-Type", "Origin", "X-On-Behalf-Of", + "X-Requested-With", "X-XSRF-TOKEN", "X-CORRELATION-ID", "X-REFERRER", + "x-recaptcha-token", "Access-Control-Allow-Origin") + // Allow list of response headers allowed to be sent by us (the server) to the client + .exposedHeaders("Authorization", "DSPACE-XSRF-TOKEN", "Location", "WWW-Authenticate"); + } if (corsAllowedOrigins != null) { registry.addMapping("/api/**").allowedMethods(CorsConfiguration.ALL) // Set Access-Control-Allow-Credentials to "true" and specify which origins are valid @@ -215,6 +208,18 @@ public void addCorsMappings(@NonNull CorsRegistry registry) { // Allow list of response headers allowed to be sent by us (the server) to the client .exposedHeaders("Authorization", "DSPACE-XSRF-TOKEN", "Location", "WWW-Authenticate"); } + if (signpostingAllowedOrigins != null) { + registry.addMapping("/signposting/**").allowedMethods(CorsConfiguration.ALL) + // Set Access-Control-Allow-Credentials to "true" and specify which origins are valid + // for our Access-Control-Allow-Origin header + .allowCredentials(signpostingAllowCredentials).allowedOrigins(signpostingAllowedOrigins) + // Allow list of request preflight headers allowed to be sent to us from the client + .allowedHeaders("Accept", "Authorization", "Content-Type", "Origin", "X-On-Behalf-Of", + "X-Requested-With", "X-XSRF-TOKEN", "X-CORRELATION-ID", "X-REFERRER", + "x-recaptcha-token", "access-control-allow-headers") + // Allow list of response headers allowed to be sent by us (the server) to the client + .exposedHeaders("Authorization", "DSPACE-XSRF-TOKEN", "Location", "WWW-Authenticate"); + } } /** @@ -244,7 +249,7 @@ public void addResourceHandlers(ResourceHandlerRegistry registry) { // Make all other Webjars available off the /webjars path registry .addResourceHandler("/webjars/**") - .addResourceLocations("/webjars/"); + .addResourceLocations("/webjars/", 
"classpath:/META-INF/resources/webjars/"); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java index 820de57b7246..80a052224fb8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java @@ -60,12 +60,12 @@ public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLEx ) { String defaultGroupUUID = configurationService.getProperty("edit.metadata.allowed-group"); if (StringUtils.isBlank(defaultGroupUUID)) { - return authorizeServiceRestUtil.authorizeActionBoolean(context, object,DSpaceRestPermission.WRITE); + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); } Group defaultGroup = StringUtils.isNotBlank(defaultGroupUUID) ? 
groupService.find(context, UUID.fromString(defaultGroupUUID)) : null; if (Objects.nonNull(defaultGroup) && groupService.isMember(context, defaultGroup)) { - return authorizeServiceRestUtil.authorizeActionBoolean(context, object,DSpaceRestPermission.WRITE); + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); } } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java index 248342e1b19b..c936f9b622f9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java @@ -21,7 +21,6 @@ import org.dspace.app.rest.submit.SubmissionService; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.util.SubmissionConfig; -import org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.app.util.SubmissionStepConfig; import org.dspace.content.Collection; @@ -31,6 +30,8 @@ import org.dspace.eperson.EPerson; import org.dspace.services.RequestService; import org.dspace.services.model.Request; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.dspace.validation.service.ValidationService; import org.dspace.versioning.ItemCorrectionService; import org.springframework.beans.factory.annotation.Autowired; @@ -60,7 +61,7 @@ public abstract class AInprogressItemConverter metadataList = new ArrayList(); - if (obj.isMetadataIndex()) { + String id = obj.getName(); + if (isValidControlledVocabularyIndex(obj)) { + DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj; + metadataList = new ArrayList<>(vocObj.getMetadataFields()); + id = 
vocObj.getVocabulary().getPluginInstanceName(); + bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName()); + bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName()); + bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL); + } else if (obj.isMetadataIndex()) { for (String s : obj.getMetadata().split(",")) { metadataList.add(s.trim()); } + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); + bir.setBrowseType(BROWSE_TYPE_VALUE_LIST); } else { metadataList.add(obj.getSortOption().getMetadata()); + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); + bir.setBrowseType(BROWSE_TYPE_FLAT); } + bir.setId(id); bir.setMetadataList(metadataList); List sortOptionsList = new ArrayList(); @@ -52,10 +71,32 @@ public BrowseIndexRest convert(BrowseIndex obj, Projection projection) { } catch (SortException e) { throw new RuntimeException(e.getMessage(), e); } - bir.setSortOptions(sortOptionsList); + if (!bir.getBrowseType().equals(BROWSE_TYPE_HIERARCHICAL)) { + bir.setSortOptions(sortOptionsList); + } return bir; } + private static boolean isValidControlledVocabularyIndex(BrowseIndex obj) { + return obj instanceof DSpaceControlledVocabularyIndex && + hasIndexFieldName((DSpaceControlledVocabularyIndex) obj) && + hasIndexPluginInstanceName((DSpaceControlledVocabularyIndex) obj); + } + + private static boolean hasIndexFieldName(DSpaceControlledVocabularyIndex obj) { + return Optional.ofNullable(obj) + .map(DSpaceControlledVocabularyIndex::getFacetConfig) + .map(DiscoverySearchFilter::getIndexFieldName) + .isPresent(); + } + + private static boolean hasIndexPluginInstanceName(DSpaceControlledVocabularyIndex obj) { + return Optional.ofNullable(obj) + .map(DSpaceControlledVocabularyIndex::getVocabulary) + .map(DSpaceControlledVocabulary::getPluginInstanceName) + .isPresent(); + } + @Override public Class getModelClass() { return BrowseIndex.class; diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BulkAccessConditionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BulkAccessConditionConverter.java new file mode 100644 index 000000000000..5516fbc834ff --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BulkAccessConditionConverter.java @@ -0,0 +1,77 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.converter; +import java.text.ParseException; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.dspace.app.rest.model.AccessConditionOptionRest; +import org.dspace.app.rest.model.BulkAccessConditionRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.submit.model.AccessConditionOption; +import org.dspace.util.DateMathParser; +import org.springframework.stereotype.Component; + +/** + * This converter will convert an object of {@Link BulkAccessConditionConfiguration} + * to an object of {@link BulkAccessConditionRest}. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +@Component +public class BulkAccessConditionConverter + implements DSpaceConverter { + + DateMathParser dateMathParser = new DateMathParser(); + + @Override + public BulkAccessConditionRest convert(BulkAccessConditionConfiguration config, Projection projection) { + BulkAccessConditionRest model = new BulkAccessConditionRest(); + model.setId(config.getName()); + model.setProjection(projection); + + for (AccessConditionOption itemAccessConditionOption : config.getItemAccessConditionOptions()) { + model.getItemAccessConditionOptions().add(convertToRest(itemAccessConditionOption)); + } + + for (AccessConditionOption bitstreamAccessConditionOption : config.getBitstreamAccessConditionOptions()) { + model.getBitstreamAccessConditionOptions().add(convertToRest(bitstreamAccessConditionOption)); + } + return model; + } + + private AccessConditionOptionRest convertToRest(AccessConditionOption option) { + AccessConditionOptionRest optionRest = new AccessConditionOptionRest(); + optionRest.setHasStartDate(option.getHasStartDate()); + optionRest.setHasEndDate(option.getHasEndDate()); + if (StringUtils.isNotBlank(option.getStartDateLimit())) { + try { + optionRest.setMaxStartDate(dateMathParser.parseMath(option.getStartDateLimit())); + } catch (ParseException e) { + throw new IllegalStateException("Wrong start date limit configuration for the access condition " + + "option named " + option.getName()); + } + } + if (StringUtils.isNotBlank(option.getEndDateLimit())) { + try { + optionRest.setMaxEndDate(dateMathParser.parseMath(option.getEndDateLimit())); + } catch (ParseException e) { + throw new IllegalStateException("Wrong end date limit configuration for the access condition " + + "option named " + option.getName()); + } + } + optionRest.setName(option.getName()); + return optionRest; + } + + @Override + public Class getModelClass() { + return BulkAccessConditionConfiguration.class; + } + +} \ No newline at 
end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java index fc5d99b05924..e9b6aa03b85a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.converter; import org.dspace.app.rest.model.CollectionRest; +import org.dspace.app.rest.projection.Projection; import org.dspace.content.Collection; import org.dspace.discovery.IndexableObject; import org.springframework.stereotype.Component; @@ -22,6 +23,13 @@ public class CollectionConverter extends DSpaceObjectConverter implements IndexableObjectConverter { + @Override + public CollectionRest convert(Collection collection, Projection projection) { + CollectionRest resource = super.convert(collection, projection); + resource.setArchivedItemsCount(collection.countArchivedItems()); + return resource; + } + @Override protected CollectionRest newInstance() { return new CollectionRest(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java index d4c06470ce86..a90ad3cfe644 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.converter; import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.projection.Projection; import org.dspace.content.Community; import org.dspace.discovery.IndexableObject; import org.springframework.stereotype.Component; @@ -23,6 +24,13 @@ public class CommunityConverter extends DSpaceObjectConverter implements 
IndexableObjectConverter { + public CommunityRest convert(Community community, Projection projection) { + CommunityRest resource = super.convert(community, projection); + resource.setArchivedItemsCount(community.countArchivedItems()); + + return resource; + } + @Override protected CommunityRest newInstance() { return new CommunityRest(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java index 0f7b47239e3f..e83790495146 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java @@ -202,17 +202,18 @@ private Annotation getDefaultFindOnePreAuthorize() { * @throws ClassCastException if the converter's return type is not compatible with the inferred return type. */ public Page toRestPage(List modelObjects, Pageable pageable, Projection projection) { + if (pageable == null) { + pageable = utils.getPageable(pageable); + } + List pageableObjects = utils.getPageObjectList(modelObjects, pageable); List transformedList = new LinkedList<>(); - for (M modelObject : modelObjects) { + for (M modelObject : pageableObjects) { R transformedObject = toRest(modelObject, projection); if (transformedObject != null) { transformedList.add(transformedObject); } } - if (pageable == null) { - pageable = utils.getPageable(pageable); - } - return utils.getPage(transformedList, pageable); + return new PageImpl(transformedList, pageable, modelObjects.size()); } /** diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java index d35a80b341a8..2c94bd8a8f3f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java @@ -9,6 +9,7 @@ import java.sql.SQLException; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -27,11 +28,14 @@ import org.dspace.core.Context; import org.dspace.core.exception.SQLRuntimeException; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; +import org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.service.CrisLayoutBoxService; +import org.dspace.layout.service.CrisLayoutTabService; import org.dspace.services.RequestService; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -39,7 +43,7 @@ /** * This is the converter from Entity CrisLayoutTab to the REST data model - * + * * @author Danilo Di Nuzzo (danilo.dinuzzo at 4science.it) * */ @@ -61,17 +65,47 @@ public class CrisLayoutTabConverter implements DSpaceConverter convertTab(tab, projection)) + .orElseGet(CrisLayoutTabRest::new); + } + + private boolean hasAccess(Item item, CrisLayoutTab tab) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return crisLayoutTabService.hasAccess(context, tab, item); + } + + private CrisLayoutTab findAlternativeTab(CrisLayoutTab tab) { + return tab.getTab2SecurityGroups() + .stream() + .map(CrisLayoutTab2SecurityGroup::getAlternativeTab) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + } + + private CrisLayoutTabRest convertTab(CrisLayoutTab tab, Projection projection) { CrisLayoutTabRest rest = new CrisLayoutTabRest(); - rest.setId(model.getID()); - rest.setEntityType(model.getEntity().getLabel()); - rest.setShortname(model.getShortName()); - rest.setHeader(model.getHeader()); - 
rest.setPriority(model.getPriority()); - rest.setSecurity(model.getSecurity()); - rest.setRows(convertRows(getScopeItem(), model.getRows(), projection)); - rest.setLeading(model.isLeading()); + rest.setId(tab.getID()); + rest.setEntityType(tab.getEntity().getLabel()); + rest.setCustomFilter(tab.getCustomFilter()); + rest.setShortname(tab.getShortName()); + rest.setHeader(tab.getHeader()); + rest.setPriority(tab.getPriority()); + rest.setSecurity(tab.getSecurity()); + rest.setRows(convertRows(getScopeItem(), tab.getRows(), projection)); + rest.setLeading(tab.isLeading()); return rest; } @@ -87,6 +121,7 @@ public CrisLayoutTab toModel(Context context, CrisLayoutTabRest rest) { tab.setSecurity(LayoutSecurity.valueOf(rest.getSecurity())); tab.setShortName(rest.getShortname()); tab.setEntity(findEntityType(context, rest)); + tab.setCustomFilter(rest.getCustomFilter()); tab.setLeading(rest.isLeading()); rest.getRows().forEach(row -> tab.addRow(toRowModel(context, row))); return tab; @@ -122,15 +157,43 @@ private CrisLayoutCellRest convertCell(Item item, CrisLayoutCell cell, Projectio private List convertBoxes(Item item, List boxes, Projection projection) { return boxes.stream() - .filter(box -> item == null || hasAccess(item, box)) - .map(box -> boxConverter.convert(box, projection)) - .collect(Collectors.toList()); + .map(box -> getCrisLayoutBox(item, box)) + .filter(Objects::nonNull) + .map(box -> boxConverter.convert(box, projection)) + .collect(Collectors.toList()); + } + + private CrisLayoutBox getCrisLayoutBox(Item item, CrisLayoutBox box) { + + if (item == null) { + return box; + } + + return Optional.of(box) + .filter(b -> hasAccess(item, b) && hasContent(item, b)) + .orElseGet(() -> + Optional.ofNullable(findAlternativeBox(box)) + .filter(altBox -> hasContent(item, altBox)) + .orElse(null)); } private boolean hasAccess(Item item, CrisLayoutBox box) { Context context = ContextUtil.obtainCurrentRequestContext(); - return crisLayoutBoxService.hasContent(context, 
box, item) - && crisLayoutBoxService.hasAccess(context, box, item); + return crisLayoutBoxService.hasAccess(context, box, item); + } + + private boolean hasContent(Item item, CrisLayoutBox box) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return crisLayoutBoxService.hasContent(context, box, item); + } + + private CrisLayoutBox findAlternativeBox(CrisLayoutBox box) { + return box.getBox2SecurityGroups() + .stream() + .map(CrisLayoutBox2SecurityGroup::getAlternativeBox) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); } private CrisLayoutRow toRowModel(Context context, CrisLayoutRowRest rowRest) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java index 73851bd94523..41cf235a878b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java @@ -80,6 +80,15 @@ private void addSortOptions(SearchConfigurationRest searchConfigurationRest, sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name()); searchConfigurationRest.addSortOption(sortOption); } + + DiscoverySortFieldConfiguration defaultSortField = searchSortConfiguration.getDefaultSortField(); + if (defaultSortField != null) { + SearchConfigurationRest.SortOption sortOption = new SearchConfigurationRest.SortOption(); + sortOption.setName(defaultSortField.getMetadataField()); + sortOption.setActualName(defaultSortField.getType()); + sortOption.setSortOrder(defaultSortField.getDefaultSortOrder().name()); + searchConfigurationRest.setDefaultSortOption(sortOption); + } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/EditItemConverter.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/EditItemConverter.java index 24c8761268c5..f19ce63d6b5f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/EditItemConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/EditItemConverter.java @@ -95,7 +95,7 @@ protected void fillFromModel(EditItem obj, EditItemRest rest, Projection project rest.setId(obj.getID() + ":" + mode.getName()); SubmissionDefinitionRest def = converter.toRest( - submissionConfigReader.getSubmissionConfigByName(mode.getSubmissionDefinition()), projection); + submissionConfigService.getSubmissionConfigByName(mode.getSubmissionDefinition()), projection); rest.setSubmissionDefinition(def); storeSubmissionName(def.getName()); for (SubmissionSectionRest sections : def.getPanels()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java index 585de2a99a57..35921c16d254 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java @@ -7,9 +7,19 @@ */ package org.dspace.app.rest.converter; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; + import org.dspace.app.rest.model.ExternalSourceEntryRest; +import org.dspace.app.rest.model.ItemRest; import org.dspace.app.rest.projection.Projection; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; import org.dspace.external.model.ExternalDataObject; +import org.dspace.web.ContextUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -22,6 +32,12 @@ public class 
ExternalSourceEntryRestConverter implements DSpaceConverter convertToItemRests(List uuids, Projection projection) { + + if (uuids == null) { + return List.of(); + } + + Context context = ContextUtil.obtainCurrentRequestContext(); + return uuids.stream() + .map(uuid -> { + try { + return itemService.find(context, uuid); + } catch (SQLException e) { + throw new RuntimeException(e); + } + }) + .filter(item -> Objects.nonNull(item)) + .map(item -> itemConverter.convert(item, projection)) + .collect(Collectors.toList()); + } + public Class getModelClass() { return ExternalDataObject.class; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/MetadataConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/MetadataConverter.java index 76aca4be231d..da47f3d8b659 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/MetadataConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/MetadataConverter.java @@ -35,7 +35,7 @@ * Converter to translate between lists of domain {@link MetadataValue}s and {@link MetadataRest} representations. 
*/ @Component -public class MetadataConverter implements DSpaceConverter { +public class MetadataConverter implements DSpaceConverter> { @Autowired private ContentServiceFactory contentServiceFactory; @@ -46,7 +46,7 @@ public class MetadataConverter implements DSpaceConverter convert(MetadataValueList metadataValues, Projection projection) { // Convert each value to a DTO while retaining place order in a map of key -> SortedSet Map> mapOfSortedSets = new HashMap<>(); @@ -60,7 +60,7 @@ public MetadataRest convert(MetadataValueList metadataValues, set.add(converter.toRest(metadataValue, projection)); } - MetadataRest metadataRest = new MetadataRest(); + MetadataRest metadataRest = new MetadataRest<>(); // Populate MetadataRest's map of key -> List while respecting SortedSet's order Map> mapOfLists = metadataRest.getMap(); @@ -80,14 +80,14 @@ public Class getModelClass() { * Sets a DSpace object's domain metadata values from a rest representation. * Any existing metadata value is deleted or overwritten. * - * @param context the context to use. - * @param dso the DSpace object. + * @param context the context to use. + * @param dso the DSpace object. * @param metadataRest the rest representation of the new metadata. - * @throws SQLException if a database error occurs. + * @throws SQLException if a database error occurs. * @throws AuthorizeException if an authorization error occurs. */ public void setMetadata(Context context, T dso, MetadataRest metadataRest) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { DSpaceObjectService dsoService = contentServiceFactory.getDSpaceObjectService(dso); dsoService.clearMetadata(context, dso, Item.ANY, Item.ANY, Item.ANY, Item.ANY); persistMetadataRest(context, dso, metadataRest, dsoService); @@ -97,14 +97,14 @@ public void setMetadata(Context context, T dso, Metadat * Add to a DSpace object's domain metadata values from a rest representation. * Any existing metadata value is preserved. 
* - * @param context the context to use. - * @param dso the DSpace object. + * @param context the context to use. + * @param dso the DSpace object. * @param metadataRest the rest representation of the new metadata. - * @throws SQLException if a database error occurs. + * @throws SQLException if a database error occurs. * @throws AuthorizeException if an authorization error occurs. */ public void addMetadata(Context context, T dso, MetadataRest metadataRest) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { DSpaceObjectService dsoService = contentServiceFactory.getDSpaceObjectService(dso); persistMetadataRest(context, dso, metadataRest, dsoService); } @@ -113,33 +113,34 @@ public void addMetadata(Context context, T dso, Metadat * Merge into a DSpace object's domain metadata values from a rest representation. * Any existing metadata value is preserved or overwritten with the new ones * - * @param context the context to use. - * @param dso the DSpace object. + * @param context the context to use. + * @param dso the DSpace object. * @param metadataRest the rest representation of the new metadata. - * @throws SQLException if a database error occurs. + * @throws SQLException if a database error occurs. * @throws AuthorizeException if an authorization error occurs. 
*/ - public void mergeMetadata(Context context, T dso, MetadataRest metadataRest) - throws SQLException, AuthorizeException { + public void mergeMetadata( + Context context, T dso, MetadataRest metadataRest + ) throws SQLException, AuthorizeException { DSpaceObjectService dsoService = contentServiceFactory.getDSpaceObjectService(dso); - for (Map.Entry> entry: metadataRest.getMap().entrySet()) { + for (Map.Entry> entry : metadataRest.getMap().entrySet()) { List metadataByMetadataString = dsoService.getMetadataByMetadataString(dso, entry.getKey()); dsoService.removeMetadataValues(context, dso, metadataByMetadataString); } persistMetadataRest(context, dso, metadataRest, dsoService); } - private void persistMetadataRest(Context context, T dso, MetadataRest metadataRest, - DSpaceObjectService dsoService) - throws SQLException, AuthorizeException { - for (Map.Entry> entry: metadataRest.getMap().entrySet()) { + private void persistMetadataRest( + Context context, T dso, MetadataRest metadataRest, DSpaceObjectService dsoService + ) throws SQLException, AuthorizeException { + for (Map.Entry> entry : metadataRest.getMap().entrySet()) { String[] seq = entry.getKey().split("\\."); String schema = seq[0]; String element = seq[1]; String qualifier = seq.length == 3 ? 
seq[2] : null; - for (MetadataValueRest mvr: entry.getValue()) { + for (MetadataValueRest mvr : entry.getValue()) { dsoService.addMetadata(context, dso, schema, element, qualifier, mvr.getLanguage(), - mvr.getValue(), mvr.getAuthority(), mvr.getConfidence()); + mvr.getValue(), mvr.getAuthority(), mvr.getConfidence()); } } dsoService.update(context, dso); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RegistrationDataConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RegistrationDataConverter.java new file mode 100644 index 000000000000..3ec5bfbf533c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RegistrationDataConverter.java @@ -0,0 +1,143 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.converter; + +import java.sql.SQLException; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.model.MetadataRest; +import org.dspace.app.rest.model.RegistrationMetadataRest; +import org.dspace.app.rest.model.RegistrationRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.AccountService; +import org.dspace.eperson.service.RegistrationDataService; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.stereotype.Component; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +@Component +public class RegistrationDataConverter implements DSpaceConverter { + + @Autowired + private HttpServletRequest request; + + @Autowired + private RegistrationDataService registrationDataService; + + @Override + public RegistrationRest convert(RegistrationData registrationData, Projection projection) { + + if (registrationData == null) { + return null; + } + + Context context = ContextUtil.obtainContext(request); + + AccountService accountService = EPersonServiceFactory.getInstance().getAccountService(); + RegistrationRest registrationRest = new RegistrationRest(); + registrationRest.setId(registrationData.getID()); + registrationRest.setEmail(registrationData.getEmail()); + registrationRest.setNetId(registrationData.getNetId()); + registrationRest.setRegistrationType( + Optional.ofNullable(registrationData.getRegistrationType()) + .map(RegistrationTypeEnum::toString) + .orElse(null) + ); + + EPerson ePerson = null; + try { + ePerson = accountService.getEPerson(context, registrationData.getToken()); + if (ePerson == null && registrationData.getRegistrationType().equals(RegistrationTypeEnum.ORCID)) { + ePerson = context.getCurrentUser(); + } + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + if (ePerson != null) { + registrationRest.setUser(ePerson.getID()); + try { + MetadataRest metadataRest = getMetadataRest(ePerson, registrationData); + if (registrationData.getEmail() != null) { + metadataRest.put( + "email", + new RegistrationMetadataRest(registrationData.getEmail(), ePerson.getEmail()) + ); + } + registrationRest.setRegistrationMetadata(metadataRest); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } else { + registrationRest.setRegistrationMetadata(getMetadataRest(registrationData)); + } + + registrationRest.setGroupNames(getGroupNames(registrationData)); + 
registrationRest.setGroups( + registrationData.getGroups().stream().map(Group::getID).collect(Collectors.toList()) + ); + return registrationRest; + } + + + private MetadataRest getMetadataRest(EPerson ePerson, RegistrationData registrationData) + throws SQLException { + return registrationDataService.groupEpersonMetadataByRegistrationData(ePerson, registrationData) + .reduce( + new MetadataRest<>(), + (map, entry) -> map.put( + entry.getKey().getMetadataField().toString('.'), + new RegistrationMetadataRest( + entry.getKey().getValue(), + entry.getValue().map(MetadataValue::getValue).orElse(null) + ) + ), + (m1, m2) -> { + m1.getMap().putAll(m2.getMap()); + return m1; + } + ); + } + + private MetadataRest getMetadataRest(RegistrationData registrationData) { + MetadataRest metadataRest = new MetadataRest<>(); + registrationData.getMetadata().forEach( + (m) -> metadataRest.put( + m.getMetadataField().toString('.'), + new RegistrationMetadataRest(m.getValue()) + ) + ); + return metadataRest; + } + + private List getGroupNames(RegistrationData registrationData) { + return registrationData.getGroups().stream() + .map(Group::getName) + .collect(Collectors.toList()); + } + + @Override + public Class getModelClass() { + return RegistrationData.class; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java index 61f18a5b3c9c..1d81e308e39f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java @@ -10,6 +10,7 @@ import static org.dspace.app.util.Util.getSourceVersion; import org.dspace.app.rest.model.RootRest; +import org.dspace.core.CrisConstants; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ 
-28,7 +29,8 @@ public RootRest convert() { rootRest.setDspaceName(configurationService.getProperty("dspace.name")); rootRest.setDspaceUI(configurationService.getProperty("dspace.ui.url")); rootRest.setDspaceServer(configurationService.getProperty("dspace.server.url")); - rootRest.setDspaceVersion("DSpace " + getSourceVersion()); + rootRest.setDspaceVersion(CrisConstants.DSPACE_BASE_VERSION); + rootRest.setCrisVersion(getSourceVersion()); return rootRest; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java index 470a3ac3425b..126d37ba1ace 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java @@ -7,15 +7,20 @@ */ package org.dspace.app.rest.converter; +import java.sql.SQLException; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import javax.servlet.http.HttpServletRequest; +import org.apache.log4j.Logger; import org.dspace.app.rest.model.PageRest; import org.dspace.app.rest.model.SearchEventRest; import org.dspace.app.rest.model.SearchResultsRest; import org.dspace.app.rest.utils.ScopeResolver; +import org.dspace.app.util.service.DSpaceObjectUtils; import org.dspace.content.DSpaceObject; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.usage.UsageEvent; @@ -25,15 +30,39 @@ @Component public class SearchEventConverter { + /* Log4j logger */ + private static final Logger log = Logger.getLogger(SearchEventConverter.class); @Autowired private ScopeResolver scopeResolver; + @Autowired + private DSpaceObjectUtils dSpaceObjectUtils; + + private final Integer[] allowedClickedObjectTypes = + new Integer[]{Constants.COMMUNITY, Constants.COLLECTION, Constants.ITEM}; + public 
UsageSearchEvent convert(Context context, HttpServletRequest request, SearchEventRest searchEventRest) { UsageSearchEvent usageSearchEvent = new UsageSearchEvent(UsageEvent.Action.SEARCH, request, context, null); usageSearchEvent.setQuery(searchEventRest.getQuery()); usageSearchEvent.setDsoType(searchEventRest.getDsoType()); + if (searchEventRest.getClickedObject() != null) { + try { + DSpaceObject clickedObject = + dSpaceObjectUtils.findDSpaceObject(context, searchEventRest.getClickedObject()); + if (clickedObject != null && + Arrays.asList(allowedClickedObjectTypes).contains(clickedObject.getType())) { + usageSearchEvent.setObject(clickedObject); + } else { + throw new IllegalArgumentException("UUID " + searchEventRest.getClickedObject() + + " was expected to resolve to a Community, Collection or Item, but didn't resolve to any"); + } + } catch (SQLException e) { + log.warn("Unable to retrieve DSpace Object with ID " + searchEventRest.getClickedObject() + + " from the database", e); + } + } if (searchEventRest.getScope() != null) { IndexableObject scopeObject = scopeResolver.resolveScope(context, String.valueOf(searchEventRest.getScope())); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java index 77cac04918bf..7976c3105a76 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java @@ -6,7 +6,9 @@ * http://www.dspace.org/license/ */ package org.dspace.app.rest.converter; + import java.text.ParseException; +import java.util.Date; import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.model.AccessConditionOptionRest; @@ -15,6 +17,7 @@ import org.dspace.submit.model.AccessConditionConfiguration; import 
org.dspace.submit.model.AccessConditionOption; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.stereotype.Component; /** @@ -27,8 +30,6 @@ public class SubmissionAccessOptionConverter implements DSpaceConverter { - DateMathParser dateMathParser = new DateMathParser(); - @Override public SubmissionAccessOptionRest convert(AccessConditionConfiguration config, Projection projection) { SubmissionAccessOptionRest model = new SubmissionAccessOptionRest(); @@ -36,13 +37,15 @@ public SubmissionAccessOptionRest convert(AccessConditionConfiguration config, P model.setCanChangeDiscoverable(config.getCanChangeDiscoverable()); model.setSingleAccessCondition(config.getSingleAccessCondition()); model.setProjection(projection); + DateMathParser dateMathParser = new DateMathParser(); for (AccessConditionOption option : config.getOptions()) { AccessConditionOptionRest optionRest = new AccessConditionOptionRest(); optionRest.setHasStartDate(option.getHasStartDate()); optionRest.setHasEndDate(option.getHasEndDate()); if (StringUtils.isNotBlank(option.getStartDateLimit())) { try { - optionRest.setMaxStartDate(dateMathParser.parseMath(option.getStartDateLimit())); + Date requested = dateMathParser.parseMath(option.getStartDateLimit()); + optionRest.setMaxStartDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new IllegalStateException("Wrong start date limit configuration for the access condition " + "option named " + option.getName()); @@ -50,7 +53,8 @@ public SubmissionAccessOptionRest convert(AccessConditionConfiguration config, P } if (StringUtils.isNotBlank(option.getEndDateLimit())) { try { - optionRest.setMaxEndDate(dateMathParser.parseMath(option.getEndDateLimit())); + Date requested = dateMathParser.parseMath(option.getEndDateLimit()); + optionRest.setMaxEndDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new IllegalStateException("Wrong end date limit configuration 
for the access condition " + "option named " + option.getName()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionDefinitionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionDefinitionConverter.java index eccd9cba41eb..a84545520463 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionDefinitionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionDefinitionConverter.java @@ -80,7 +80,7 @@ public SubmissionDefinitionRest convert(SubmissionConfig obj, Projection project Context context = null; try { context = ContextUtil.obtainContext(request); - List collections = panelConverter.getSubmissionConfigReader() + List collections = panelConverter.getSubmissionConfigService() .getCollectionsBySubmissionConfig(context, obj.getSubmissionName()); DSpaceConverter cc = converter.getConverter(Collection.class); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java index e18f1761c0a9..69a46350ad8a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java @@ -131,7 +131,7 @@ private SubmissionFormFieldRest getField(DCInput dcinput, String formName) { dcinput.getVocabulary(), formName)); selMd.setClosed( isClosed(dcinput.getSchema(), dcinput.getElement(), dcinput.getQualifier(), - dcinput.getPairsType(), dcinput.getVocabulary())); + dcinput.getPairsType(), dcinput.getVocabulary(), dcinput.isClosedVocabulary())); } else { inputRest.setType(inputType); } @@ -157,7 +157,7 @@ private SubmissionFormFieldRest getField(DCInput dcinput, String formName) { dcinput.getQualifier(), dcinput.getPairsType(), dcinput.getVocabulary(), formName)); 
selMd.setClosed(isClosed(dcinput.getSchema(), dcinput.getElement(), - dcinput.getQualifier(), null, dcinput.getVocabulary())); + dcinput.getQualifier(), null, dcinput.getVocabulary(), dcinput.isClosedVocabulary())); } selMd.setMetadata(org.dspace.core.Utils .standardize(dcinput.getSchema(), dcinput.getElement(), dcinput.getQualifier(), ".")); @@ -176,7 +176,7 @@ private SubmissionFormFieldRest getField(DCInput dcinput, String formName) { selMd.setControlledVocabulary(getAuthorityName(dcinput.getSchema(), dcinput.getElement(), pairs.get(idx + 1), dcinput.getPairsType(), dcinput.getVocabulary(), formName)); selMd.setClosed(isClosed(dcinput.getSchema(), dcinput.getElement(), - dcinput.getQualifier(), null, dcinput.getVocabulary())); + dcinput.getQualifier(), null, dcinput.getVocabulary(), dcinput.isClosedVocabulary())); } selectableMetadata.add(selMd); } @@ -257,9 +257,11 @@ private String getAuthorityName(String schema, String element, String qualifier, } private boolean isClosed(String schema, String element, String qualifier, String valuePairsName, - String vocabularyName) { - if (StringUtils.isNotBlank(valuePairsName) || StringUtils.isNotBlank(vocabularyName)) { + String vocabularyName, boolean isClosedVocabulary) { + if (StringUtils.isNotBlank(valuePairsName)) { return true; + } else if (StringUtils.isNotBlank(vocabularyName)) { + return isClosedVocabulary; } return authorityUtils.isClosed(schema, element, qualifier); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java index 2d232e6fa4c7..d9ab0a3f10d5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java @@ -7,15 +7,18 @@ */ package org.dspace.app.rest.converter; +import java.sql.SQLException; + 
import org.apache.logging.log4j.Logger; import org.dspace.app.rest.model.ScopeEnum; import org.dspace.app.rest.model.SubmissionSectionRest; import org.dspace.app.rest.model.SubmissionVisibilityRest; import org.dspace.app.rest.model.VisibilityEnum; import org.dspace.app.rest.projection.Projection; -import org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.springframework.stereotype.Component; /** @@ -29,7 +32,7 @@ public class SubmissionSectionConverter implements DSpaceConverter getModelClass() { return SubmissionStepConfig.class; } - public SubmissionConfigReader getSubmissionConfigReader() throws SubmissionConfigReaderException { - if (submissionConfigReader == null) { - submissionConfigReader = new SubmissionConfigReader(); + public SubmissionConfigService getSubmissionConfigService() + throws SubmissionConfigReaderException, SQLException, IllegalStateException { + if (submissionConfigService == null) { + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); } - return submissionConfigReader; + return submissionConfigService; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java index edefd7434e76..f42628c96f0a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java @@ -48,6 +48,7 @@ import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ResponseStatus; import 
org.springframework.web.context.request.WebRequest; +import org.springframework.web.multipart.MaxUploadSizeExceededException; import org.springframework.web.multipart.MultipartException; import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler; @@ -110,6 +111,13 @@ protected void handleWrongRequestException(HttpServletRequest request, HttpServl sendErrorResponse(request, response, ex, "Request is invalid or incorrect", HttpServletResponse.SC_BAD_REQUEST); } + @ExceptionHandler(MaxUploadSizeExceededException.class) + protected void handleMaxUploadSizeExceededException(HttpServletRequest request, HttpServletResponse response, + Exception ex) throws IOException { + sendErrorResponse(request, response, ex, "Request entity is too large", + HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE); + } + @ExceptionHandler(SQLException.class) protected void handleSQLException(HttpServletRequest request, HttpServletResponse response, Exception ex) throws IOException { @@ -137,7 +145,7 @@ protected void handleUnprocessableEntityException(HttpServletRequest request, Ht Exception ex) throws IOException { //422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity". //Using the value from HttpStatus. 
- sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "Unprocessable or invalid entity", HttpStatus.UNPROCESSABLE_ENTITY.value()); } @@ -145,7 +153,7 @@ protected void handleUnprocessableEntityException(HttpServletRequest request, Ht @ExceptionHandler( {InvalidSearchRequestException.class}) protected void handleInvalidSearchRequestException(HttpServletRequest request, HttpServletResponse response, Exception ex) throws IOException { - sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "Invalid search request", HttpStatus.UNPROCESSABLE_ENTITY.value()); } @@ -179,12 +187,13 @@ protected void handleOrcidValidationException(HttpServletRequest request, HttpSe GroupNameNotProvidedException.class, GroupHasPendingWorkflowTasksException.class, PasswordNotValidException.class, + RESTBitstreamNotFoundException.class }) protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response, TranslatableException ex) throws IOException { Context context = ContextUtil.obtainContext(request); sendErrorResponse( - request, response, null, ex.getLocalizedMessage(context), HttpStatus.UNPROCESSABLE_ENTITY.value() + request, response, (Exception) ex, ex.getLocalizedMessage(context), HttpStatus.UNPROCESSABLE_ENTITY.value() ); } @@ -200,7 +209,7 @@ protected ResponseEntity handleCustomUnprocessableEditException(HttpServ protected void ParameterConversionException(HttpServletRequest request, HttpServletResponse response, Exception ex) throws IOException { // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 - sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "A required parameter is invalid", HttpStatus.BAD_REQUEST.value()); } @@ -209,7 +218,7 @@ protected void ParameterConversionException(HttpServletRequest request, HttpServ protected void MissingParameterException(HttpServletRequest request, 
HttpServletResponse response, Exception ex) throws IOException { // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 - sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "A required parameter is missing", HttpStatus.BAD_REQUEST.value()); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java new file mode 100644 index 000000000000..a0b48e3c0dfc --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.exception; + +import java.text.MessageFormat; + +import org.dspace.core.Context; +import org.dspace.core.I18nUtil; + +/** + *

      Extend {@link UnprocessableEntityException} to provide a specific error message + * in the REST response. The error message is added to the response in + * {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException}, + * hence it should not contain sensitive or security-compromising info.

      + * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException { + + public static String uuid; + + /** + * @param formatStr string with placeholders, ideally obtained using {@link I18nUtil} + * @return message with bitstream id substituted + */ + private static String formatMessage(String formatStr) { + MessageFormat fmt = new MessageFormat(formatStr); + return fmt.format(new String[]{uuid}); + } + + public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message"; + + public RESTBitstreamNotFoundException(String uuid) { + super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY))); + RESTBitstreamNotFoundException.uuid = uuid; + } + + public String getMessageKey() { + return MESSAGE_KEY; + } + + public String getLocalizedMessage(Context context) { + return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context)); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java index ee70dbf43132..9e515984fe03 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java @@ -37,11 +37,11 @@ protected void addLinks(final BrowseEntryResource halResource, final Pageable pa UriComponentsBuilder baseLink = uriBuilder( getMethodOn(bix.getCategory(), bix.getType()).findRel(null, null, bix.getCategory(), English.plural(bix.getType()), bix.getId(), - BrowseIndexRest.ITEMS, null, null)); + BrowseIndexRest.LINK_ITEMS, null, null)); addFilterParams(baseLink, data); - list.add(buildLink(BrowseIndexRest.ITEMS, + list.add(buildLink(BrowseIndexRest.LINK_ITEMS, baseLink.build().encode().toUriString())); } } diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java index c306691eb352..30404e030ab6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java @@ -21,6 +21,8 @@ import org.dspace.app.rest.model.hateoas.DSpaceResource; import org.dspace.app.rest.utils.Utils; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.Ordered; +import org.springframework.core.annotation.Order; import org.springframework.data.domain.Pageable; import org.springframework.hateoas.IanaLinkRelations; import org.springframework.hateoas.Link; @@ -33,6 +35,7 @@ * @author Tom Desair (tom dot desair at atmire dot com) */ @Component +@Order(Ordered.HIGHEST_PRECEDENCE) public class DSpaceResourceHalLinkFactory extends HalLinkFactory { @Autowired diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java index 9fee6cbdbad2..f7978f00fdf5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java @@ -10,6 +10,7 @@ import java.util.List; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import org.dspace.app.rest.RestResourceController; @@ -20,11 +21,11 @@ */ @LinksRest(links = { @LinkRest( - name = BrowseIndexRest.ITEMS, + name = BrowseIndexRest.LINK_ITEMS, method = "listBrowseItems" ), @LinkRest( - name = BrowseIndexRest.ENTRIES, + name = BrowseIndexRest.LINK_ENTRIES, method = "listBrowseEntries" ) }) @@ -35,20 +36,38 @@ public 
class BrowseIndexRest extends BaseObjectRest { public static final String CATEGORY = RestAddressableModel.DISCOVER; - public static final String ITEMS = "items"; - public static final String ENTRIES = "entries"; - - boolean metadataBrowse; - + public static final String LINK_ITEMS = "items"; + public static final String LINK_ENTRIES = "entries"; + public static final String LINK_VOCABULARY = "vocabulary"; + + // if the browse index has two levels, the 1st level shows the list of entries like author names, subjects, types, + // etc. the second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_VALUE_LIST = "valueList"; + // if the browse index has one level: the full list of items + public static final String BROWSE_TYPE_FLAT = "flatBrowse"; + // if the browse index should display the vocabulary tree. The 1st level shows the tree. + // The second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse"; + + // Shared fields + String browseType; @JsonProperty(value = "metadata") List metadataList; + // Single browse index fields + @JsonInclude(JsonInclude.Include.NON_NULL) String dataType; - + @JsonInclude(JsonInclude.Include.NON_NULL) List sortOptions; - + @JsonInclude(JsonInclude.Include.NON_NULL) String order; + // Hierarchical browse fields + @JsonInclude(JsonInclude.Include.NON_NULL) + String facetType; + @JsonInclude(JsonInclude.Include.NON_NULL) + String vocabulary; + @JsonIgnore @Override public String getCategory() { @@ -60,14 +79,6 @@ public String getType() { return NAME; } - public boolean isMetadataBrowse() { - return metadataBrowse; - } - - public void setMetadataBrowse(boolean metadataBrowse) { - this.metadataBrowse = metadataBrowse; - } - public List getMetadataList() { return metadataList; } @@ -100,6 +111,38 @@ public void setSortOptions(List sortOptions) { this.sortOptions = sortOptions; } + /** + * - valueList => if 
the browse index has two levels, the 1st level shows the list of entries like author names, + * subjects, types, etc. the second level is the actual list of items linked to a specific entry + * - flatBrowse if the browse index has one level: the full list of items + * - hierarchicalBrowse if the browse index should display the vocabulary tree. The 1st level shows the tree. + * The second level is the actual list of items linked to a specific entry + */ + public void setBrowseType(String browseType) { + this.browseType = browseType; + } + + public String getBrowseType() { + return browseType; + } + + public void setFacetType(String facetType) { + this.facetType = facetType; + } + + public String getFacetType() { + return facetType; + } + + public void setVocabulary(String vocabulary) { + this.vocabulary = vocabulary; + } + + + public String getVocabulary() { + return vocabulary; + } + @Override public Class getController() { return RestResourceController.class; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BulkAccessConditionRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BulkAccessConditionRest.java new file mode 100644 index 000000000000..97d35117d1da --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BulkAccessConditionRest.java @@ -0,0 +1,84 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import org.dspace.app.rest.RestResourceController; + +/** + * The Bulk Access Condition Configuration REST Resource + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionRest extends BaseObjectRest { + 
+ private static final long serialVersionUID = -7708437586052984082L; + + public static final String NAME = "bulkaccessconditionoption"; + public static final String PLURAL = "bulkaccessconditionoptions"; + public static final String CATEGORY = RestAddressableModel.CONFIGURATION; + + private String id; + + private List itemAccessConditionOptions; + + private List bitstreamAccessConditionOptions; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public List getItemAccessConditionOptions() { + if (Objects.isNull(itemAccessConditionOptions)) { + itemAccessConditionOptions = new ArrayList<>(); + } + return itemAccessConditionOptions; + } + + public void setItemAccessConditionOptions( + List itemAccessConditionOptions) { + this.itemAccessConditionOptions = itemAccessConditionOptions; + } + + public List getBitstreamAccessConditionOptions() { + if (Objects.isNull(bitstreamAccessConditionOptions)) { + bitstreamAccessConditionOptions = new ArrayList<>(); + } + return bitstreamAccessConditionOptions; + } + + public void setBitstreamAccessConditionOptions( + List bitstreamAccessConditionOptions) { + this.bitstreamAccessConditionOptions = bitstreamAccessConditionOptions; + } + + @Override + public String getType() { + return NAME; + } + + @Override + public String getCategory() { + return CATEGORY; + } + + @Override + @JsonIgnore + @SuppressWarnings("rawtypes") + public Class getController() { + return RestResourceController.class; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java index 1de4ec632cff..3f5ae3bb34c2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java @@ -74,4 +74,16 @@ public String getCategory() { public String getType() 
{ return NAME; } + + private int archivedItemsCount; + + public int getArchivedItemsCount() { + return archivedItemsCount; + } + + public void setArchivedItemsCount(int archivedItemsCount) { + this.archivedItemsCount = archivedItemsCount; + } + + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java index f8ccbad10e62..86dc4b2c3900 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java @@ -58,4 +58,14 @@ public String getCategory() { public String getType() { return NAME; } + + private int archivedItemsCount; + + public int getArchivedItemsCount() { + return archivedItemsCount; + } + + public void setArchivedItemsCount(int archivedItemsCount) { + this.archivedItemsCount = archivedItemsCount; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java index 72803cd751b8..9bebef4f9c33 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java @@ -18,7 +18,7 @@ /** * The CrisLayoutTab REST Resource - * + * * @author Danilo Di Nuzzo (danilo.dinuzzo at 4science.it) * */ @@ -39,6 +39,7 @@ public class CrisLayoutTabRest extends BaseObjectRest { private String shortname; private String header; private String entityType; + private String customFilter; private Integer priority; private Integer security; private Boolean leading; @@ -102,6 +103,14 @@ public void setEntityType(String entityType) { this.entityType = entityType; } + public String getCustomFilter() { + return customFilter; + } + + public void setCustomFilter(String customFilter) { + this.customFilter = customFilter; + } 
+ public Integer getPriority() { return priority; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/DSpaceObjectRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/DSpaceObjectRest.java index 1b71eb8957a2..e7b43ebe33c2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/DSpaceObjectRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/DSpaceObjectRest.java @@ -20,7 +20,7 @@ public abstract class DSpaceObjectRest extends BaseObjectRest { private String name; private String handle; - MetadataRest metadata = new MetadataRest(); + MetadataRest metadata = new MetadataRest<>(); @Override public String getId() { @@ -56,11 +56,11 @@ public void setHandle(String handle) { * * @return the metadata. */ - public MetadataRest getMetadata() { + public MetadataRest getMetadata() { return metadata; } - public void setMetadata(MetadataRest metadata) { + public void setMetadata(MetadataRest metadata) { this.metadata = metadata; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java index 06af7e222713..4e578c313870 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest.model; +import java.util.List; + import org.dspace.app.rest.ExternalSourcesRestController; /** @@ -38,6 +40,7 @@ public String getType() { private String value; private String externalSource; private MetadataRest metadata = new MetadataRest(); + private List matchObjects; /** * Generic getter for the id @@ -118,4 +121,12 @@ public MetadataRest getMetadata() { public void setMetadata(MetadataRest metadata) { this.metadata = metadata; } + + public List getMatchObjects() { + return matchObjects; + } + + 
public void setMatchObjects(List matchObjects) { + this.matchObjects = matchObjects; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataRest.java index d1367c8fea82..072acbcfd71e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataRest.java @@ -19,10 +19,10 @@ /** * Rest representation of a map of metadata keys to ordered lists of values. */ -public class MetadataRest { +public class MetadataRest { @JsonAnySetter - private SortedMap> map = new TreeMap(); + private SortedMap> map = new TreeMap(); /** * Gets the map. @@ -30,7 +30,7 @@ public class MetadataRest { * @return the map of keys to ordered values. */ @JsonAnyGetter - public SortedMap> getMap() { + public SortedMap> getMap() { return map; } @@ -44,16 +44,16 @@ public SortedMap> getMap() { * they are passed to this method. * @return this instance, to support chaining calls for easy initialization. */ - public MetadataRest put(String key, MetadataValueRest... values) { + public MetadataRest put(String key, T... 
values) { // determine highest explicitly ordered value int highest = -1; - for (MetadataValueRest value : values) { + for (T value : values) { if (value.getPlace() > highest) { highest = value.getPlace(); } } // add any non-explicitly ordered values after highest - for (MetadataValueRest value : values) { + for (T value : values) { if (value.getPlace() < 0) { highest++; value.setPlace(highest); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RegistrationMetadataRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RegistrationMetadataRest.java new file mode 100644 index 000000000000..370bd9027f62 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RegistrationMetadataRest.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model; + +import com.fasterxml.jackson.annotation.JsonInclude; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class RegistrationMetadataRest extends MetadataValueRest { + + @JsonInclude(JsonInclude.Include.NON_NULL) + private String overrides; + + public RegistrationMetadataRest(String value, String overrides) { + super(); + this.value = value; + this.overrides = overrides; + } + + public RegistrationMetadataRest(String value) { + this(value, null); + } + + public String getOverrides() { + return overrides; + } + + public void setOverrides(String overrides) { + this.overrides = overrides; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RegistrationRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RegistrationRest.java index 191aec88a414..7285a01a4a24 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RegistrationRest.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RegistrationRest.java @@ -27,14 +27,28 @@ public class RegistrationRest extends RestAddressableModel { public static final String NAME_PLURAL = "registrations"; public static final String CATEGORY = EPERSON; + private Integer id; private String email; private UUID user; + private String registrationType; + private String netId; + @JsonInclude(JsonInclude.Include.NON_NULL) + private MetadataRest registrationMetadata; @JsonInclude(JsonInclude.Include.NON_NULL) private List groupNames = Collections.emptyList(); - private List groups = Collections.emptyList(); + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + /** * Generic getter for the email + * * @return the email value of this RegisterRest */ public String getEmail() { @@ -43,7 +57,8 @@ public String getEmail() { /** * Generic setter for the email - * @param email The email to be set on this RegisterRest + * + * @param email The email to be set on this RegisterRest */ public void setEmail(String email) { this.email = email; @@ -51,6 +66,7 @@ public void setEmail(String email) { /** * Generic getter for the user + * * @return the user value of this RegisterRest */ public UUID getUser() { @@ -59,12 +75,38 @@ public UUID getUser() { /** * Generic setter for the user - * @param user The user to be set on this RegisterRest + * + * @param user The user to be set on this RegisterRest */ public void setUser(UUID user) { this.user = user; } + public String getRegistrationType() { + return registrationType; + } + + public void setRegistrationType(String registrationType) { + this.registrationType = registrationType; + } + + public String getNetId() { + return netId; + } + + public void setNetId(String netId) { + this.netId = netId; + } + + public MetadataRest getRegistrationMetadata() { + return registrationMetadata; + } + + public void setRegistrationMetadata( + MetadataRest registrationMetadata) { + 
this.registrationMetadata = registrationMetadata; + } + public List getGroups() { return groups; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java index cef8965601ca..9fd6a1263423 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java @@ -21,6 +21,7 @@ public class RootRest extends RestAddressableModel { private String dspaceName; private String dspaceServer; private String dspaceVersion; + private String crisVersion; public String getCategory() { return CATEGORY; @@ -67,6 +68,14 @@ public void setDspaceVersion(String dspaceVersion) { this.dspaceVersion = dspaceVersion; } + public String getCrisVersion() { + return crisVersion; + } + + public void setCrisVersion(String crisVersion) { + this.crisVersion = crisVersion; + } + @Override public boolean equals(Object object) { return (object instanceof RootRest && @@ -76,6 +85,7 @@ public boolean equals(Object object) { .append(this.getDspaceUI(), ((RootRest) object).getDspaceUI()) .append(this.getDspaceName(), ((RootRest) object).getDspaceName()) .append(this.getDspaceServer(), ((RootRest) object).getDspaceServer()) + .append(this.getCrisVersion(), ((RootRest)object).getCrisVersion()) .isEquals()); } @@ -88,6 +98,7 @@ public int hashCode() { .append(this.getDspaceName()) .append(this.getDspaceUI()) .append(this.getDspaceServer()) + .append(this.getCrisVersion()) .toHashCode(); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java index 7ec1b2250092..b25d827e75c1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java @@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest { private List filters = new LinkedList<>(); private List sortOptions = new LinkedList<>(); + private SortOption defaultSortOption; + public String getCategory() { return CATEGORY; } @@ -75,6 +77,14 @@ public List getSortOptions() { return sortOptions; } + public SortOption getDefaultSortOption() { + return defaultSortOption; + } + + public void setDefaultSortOption(SortOption defaultSortOption) { + this.defaultSortOption = defaultSortOption; + } + @Override public boolean equals(Object object) { return (object instanceof SearchConfigurationRest && diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java index e029dbaf9919..46827711f2ea 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java @@ -25,6 +25,7 @@ public class SearchEventRest extends BaseObjectRest { private UUID scope; private String configuration; private String dsoType; + private UUID clickedObject; private List appliedFilters; private SearchResultsRest.Sorting sort; private PageRest page; @@ -97,4 +98,12 @@ public String getDsoType() { public void setDsoType(String dsoType) { this.dsoType = dsoType; } + + public UUID getClickedObject() { + return clickedObject; + } + + public void setClickedObject(UUID clickedObject) { + this.clickedObject = clickedObject; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ViewEventRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ViewEventRest.java index 351a32eab0f1..897a3f86ae99 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ViewEventRest.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ViewEventRest.java @@ -23,6 +23,7 @@ public class ViewEventRest extends BaseObjectRest { private UUID targetId; private String targetType; + private String referrer; @Override @JsonIgnore @@ -46,6 +47,14 @@ public void setTargetType(String targetType) { this.targetType = targetType; } + public String getReferrer() { + return referrer; + } + + public void setReferrer(String referrer) { + this.referrer = referrer; + } + public String getCategory() { return CATEGORY; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java index f6c821595f55..61158704ea5a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java @@ -7,9 +7,20 @@ */ package org.dspace.app.rest.model.hateoas; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +import org.atteo.evo.inflector.English; +import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.model.BrowseIndexRest; +import org.dspace.app.rest.model.VocabularyRest; import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; import org.dspace.app.rest.utils.Utils; +import org.dspace.content.authority.ChoiceAuthority; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.springframework.hateoas.Link; +import org.springframework.web.util.UriComponentsBuilder; /** * Browse Index Rest HAL Resource. 
The HAL Resource wraps the REST Resource @@ -19,15 +30,32 @@ */ @RelNameDSpaceResource(BrowseIndexRest.NAME) public class BrowseIndexResource extends DSpaceResource { + + public BrowseIndexResource(BrowseIndexRest bix, Utils utils) { super(bix, utils); // TODO: the following code will force the embedding of items and // entries in the browseIndex we need to find a way to populate the rels // array from the request/projection right now it is always null // super(bix, utils, "items", "entries"); - if (bix.isMetadataBrowse()) { - add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES)); + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES)); + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) { + ChoiceAuthorityService choiceAuthorityService = + ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService(); + ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary()); + UriComponentsBuilder baseLink = linkTo( + methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null, + null, VocabularyRest.CATEGORY, + English.plural(VocabularyRest.NAME), source.getPluginInstanceName(), + "", null, null)).toUriComponentsBuilder(); + + add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY)); } - add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS)); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BulkAccessConditionResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BulkAccessConditionResource.java new file mode 100644 index 000000000000..2d3440b29919 --- /dev/null +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BulkAccessConditionResource.java @@ -0,0 +1,27 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model.hateoas; +import org.dspace.app.rest.model.BulkAccessConditionRest; +import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; +import org.dspace.app.rest.utils.Utils; + +/** + * BulkAccessCondition HAL Resource. + * This resource adds the data from the REST object together with embedded objects + * and a set of links if applicable. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +@RelNameDSpaceResource(BulkAccessConditionRest.NAME) +public class BulkAccessConditionResource extends DSpaceResource { + + public BulkAccessConditionResource(BulkAccessConditionRest data, Utils utils) { + super(data, utils); + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index a3590868b8be..50952af0c0b2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -21,9 +21,12 @@ import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.Parameter; import org.dspace.app.rest.SearchRestMethod; +import org.dspace.app.rest.converter.JsonPatchConverter; import org.dspace.app.rest.exception.DSpaceBadRequestException; import 
org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.UnprocessableEntityException; @@ -46,6 +49,7 @@ import org.dspace.core.Context; import org.dspace.core.exception.SQLRuntimeException; import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -83,6 +87,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository operationsLimit) { + throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " + + operationsLimit); + } + resourcePatch.patch(obtainContext(), null, patch.getOperations()); + context.commit(); + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java index 93224f78cd53..f608595c3dda 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java @@ -40,7 +40,7 @@ * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." 
+ BrowseIndexRest.LINK_ENTRIES) public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -127,7 +127,8 @@ public Page listBrowseEntries(HttpServletRequest request, Strin @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (bir.isMetadataBrowse() && "entries".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) && + name.equals(BrowseIndexRest.LINK_ENTRIES)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java index 8ffefb619b47..6aedcee6c0e7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -17,7 +18,10 @@ import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseIndex; import org.dspace.browse.CrossLinks; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.security.access.prepost.PreAuthorize; @@ -31,26 +35,48 @@ @Component(BrowseIndexRest.CATEGORY + "." 
+ BrowseIndexRest.NAME) public class BrowseIndexRestRepository extends DSpaceRestRepository { + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + @Override @PreAuthorize("permitAll()") public BrowseIndexRest findOne(Context context, String name) { - BrowseIndexRest bi = null; + BrowseIndexRest bi = createFromMatchingBrowseIndex(name); + if (bi == null) { + bi = createFromMatchingVocabulary(name); + } + + return bi; + } + + private BrowseIndexRest createFromMatchingVocabulary(String name) { + DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name); + if (vocabularyIndex != null) { + return converter.toRest(vocabularyIndex, utils.obtainProjection()); + } + return null; + } + + private BrowseIndexRest createFromMatchingBrowseIndex(String name) { BrowseIndex bix; try { - bix = BrowseIndex.getBrowseIndex(name); + bix = BrowseIndex.getBrowseIndex(name); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); } if (bix != null) { - bi = converter.toRest(bix, utils.obtainProjection()); + return converter.toRest(bix, utils.obtainProjection()); } - return bi; + return null; } @Override public Page findAll(Context context, Pageable pageable) { try { - List indexes = Arrays.asList(BrowseIndex.getBrowseIndices()); + List indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices())); + choiceAuthorityService.getChoiceAuthoritiesNames() + .stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null) + .forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name))); return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection()); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java index 
74aa9f38bfec..baa79bc80ae7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java @@ -42,7 +42,7 @@ * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ITEMS) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ITEMS) public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -155,7 +155,8 @@ public Page listBrowseItems(HttpServletRequest request, String browseN @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (!bir.isMetadataBrowse() && "items".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) && + name.equals(BrowseIndexRest.LINK_ITEMS)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java new file mode 100644 index 000000000000..2bf25978efc4 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; + +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService; +import 
org.dspace.app.rest.exception.RESTAuthorizationException; +import org.dspace.app.rest.model.BulkAccessConditionRest; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.stereotype.Component; + +/** + * This is the repository responsible to manage Bulk Access Condition options + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +@Component(BulkAccessConditionRest.CATEGORY + "." + BulkAccessConditionRest.NAME) +public class BulkAccessConditionRestRepository extends DSpaceRestRepository { + + @Autowired + private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService; + + @Autowired + private AuthorizeService authorizeService; + + @Override + @PreAuthorize("permitAll()") + public BulkAccessConditionRest findOne(Context context, String id) { + + if (!isAuthorized(context)) { + throw new RESTAuthorizationException("Only admin users of community or collection or item " + + "are allowed to bulk access condition"); + } + + BulkAccessConditionConfiguration bulkConfiguration = + bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration(id); + + return Objects.nonNull(bulkConfiguration) ? 
+ converter.toRest(bulkConfiguration, utils.obtainProjection()) : null; + } + + @Override + @PreAuthorize("permitAll()") + public Page findAll(Context context, Pageable pageable) { + + if (!isAuthorized(context)) { + throw new RESTAuthorizationException("Only admin users of community or collection or item " + + "are allowed to bulk access condition"); + } + + List configurations = + bulkAccessConditionConfigurationService.getBulkAccessConditionConfigurations(); + + return converter.toRestPage(configurations, pageable, configurations.size(), utils.obtainProjection()); + } + + @Override + public Class getDomainClass() { + return BulkAccessConditionRest.class; + } + + private boolean isAuthorized(Context context) { + try { + return context.getCurrentUser() != null && + (authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) || + authorizeService.isItemAdmin(context)); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java index ed580a21b746..3d11379cd328 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java @@ -12,9 +12,12 @@ import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.BitstreamRest; import org.dspace.app.rest.model.BundleRest; import org.dspace.app.rest.projection.Projection; +import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.service.BundleService; 
import org.dspace.core.Context; @@ -34,6 +37,19 @@ public class BundlePrimaryBitstreamLinkRepository extends AbstractDSpaceRestRepo @Autowired BundleService bundleService; + /** + * Retrieves the primaryBitstream of a Bundle. + * Returns null if Bundle doesn't have a primaryBitstream. + *
      + * curl -X GET "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * + * + * @param request The HttpServletRequest if relevant + * @param bundleId The UUID of the Bundle + * @param optionalPageable The pageable if relevant + * @param projection The projection to use + * @return The primaryBitstream, or null if not found + */ @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'READ')") public BitstreamRest getPrimaryBitstream(@Nullable HttpServletRequest request, UUID bundleId, @@ -53,4 +69,98 @@ public BitstreamRest getPrimaryBitstream(@Nullable HttpServletRequest request, throw new RuntimeException(e); } } + + /** + * Sets a primaryBitstream on a Bundle. + * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + * @param bitstream The Bitstream to use as primaryBitstream + * @param projection The projection to use + * @return The Bundle + */ + @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'WRITE')") + public BundleRest createPrimaryBitstream(Context context, UUID bundleId, + Bitstream bitstream, Projection projection) { + try { + Bundle bundle = setPrimaryBitstream(context, bundleId, bitstream, true); + return converter.toRest(context.reloadEntity(bundle), projection); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + /** + * Updates a primaryBitstream on a Bundle. 
+ * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + * @param bitstream The Bitstream to use as primaryBitstream + * @param projection The projection to use + * @return The Bundle + */ + @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'WRITE')") + public BundleRest updatePrimaryBitstream(Context context, UUID bundleId, + Bitstream bitstream, Projection projection) { + try { + Bundle bundle = setPrimaryBitstream(context, bundleId, bitstream, false); + return converter.toRest(context.reloadEntity(bundle), projection); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + /** + * Deletes the primaryBitstream on a Bundle. + * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + */ + @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'WRITE')") + public void deletePrimaryBitstream(Context context, UUID bundleId) { + try { + Bundle bundle = setPrimaryBitstream(context, bundleId, null, false); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + /** + * Internal method to set the primaryBitstream on a Bundle. 
+ * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + * @param bitstream The Bitstream to use as primaryBitstream + * @param shouldBeSet Whether a primaryBitstream should already be set: + * primaryBitstream should be present before updating or deleting, + * it should be null before adding + * @return The Bundle + * @throws ResourceNotFoundException if the bundle is not found + * @throws DSpaceBadRequestException if primaryBitstream exists during an POST, + * if primaryBitstream is null during an UPDATE or DELETE + * @throws UnprocessableEntityException if the bundle does not contain the bitstream + */ + private Bundle setPrimaryBitstream(Context context, UUID bundleId, Bitstream bitstream, boolean shouldBeSet) + throws SQLException { + Bundle bundle = bundleService.find(context, bundleId); + if (bundle == null) { + throw new ResourceNotFoundException("No such bundle: " + bundleId); + } + if (!shouldBeSet && bundle.getPrimaryBitstream() == null) { + throw new DSpaceBadRequestException("Bundle '" + bundle.getName() + + "' does not have a primary bitstream."); + } + if (shouldBeSet && bundle.getPrimaryBitstream() != null) { + throw new DSpaceBadRequestException("Bundle '" + bundle.getName() + + "' already has a primary bitstream."); + } + if (bitstream != null && !bundle.getBitstreams().contains(bitstream)) { + throw new UnprocessableEntityException("Bundle '" + bundle.getName() + "' does not contain " + + "bitstream with id: " + bitstream.getID()); + } + + bundle.setPrimaryBitstreamID(bitstream); + context.commit(); + return bundle; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java index c77dcf18dc7b..3c728d8c31b9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -31,6 +32,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -72,6 +74,14 @@ public Page getCollections(@Nullable HttpServletRequest request, discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? 
DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCol : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java index c211810d11f9..135d964f3f42 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -29,6 +30,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -68,6 +70,14 @@ public Page getSubcommunities(@Nullable HttpServletRequest reques discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? 
DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCommunities : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java index 301ada1b1890..f0017576958a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java @@ -106,6 +106,7 @@ public Page findByEntityType( tabList = service.findByEntityType( context, type, + null, pageable.getPageSize(), (pageable.getPageNumber() * pageable.getPageSize()) ); } catch (SQLException e) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 01f127eca5ac..a93f5e55dc02 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -195,7 +195,11 @@ public long count() { /** * Delete the object identified by its ID */ - public void deleteById(ID id) { + /** + * Method should be synchronized to avoid hibernate partial deletion bug when deleting multiple bitstreams: + * https://github.com/DSpace/DSpace/issues/8694 + */ + public synchronized void deleteById(ID id) { Context context = obtainContext(); try { getThisRepository().delete(context, id); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index f28f1bb8fda2..d11a634bc3be 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -85,7 +85,7 @@ public SearchConfigurationRest getSearchConfiguration(final String dsoScope, fin IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -97,7 +97,7 @@ public SearchResultsRest getSearchObjects(final String query, final List Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); boolean isRelatedItem = discoveryConfiguration != null && discoveryConfiguration instanceof DiscoveryRelatedItemConfiguration; @@ -129,7 +129,7 @@ public FacetConfigurationRest getFacetsConfiguration(final String dsoScope, fina IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -146,7 +146,7 @@ public FacetResultsRest getFacetObjects(String 
facetName, String prefix, String IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); boolean isRelatedItem = discoveryConfiguration != null && discoveryConfiguration instanceof DiscoveryRelatedItemConfiguration; @@ -179,7 +179,7 @@ public SearchResultsRest getAllFacets(String query, List dsoTypes, Strin Pageable page = PageRequest.of(1, 1); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); boolean isRelatedItem = discoveryConfiguration != null && discoveryConfiguration instanceof DiscoveryRelatedItemConfiguration; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java index 566917854532..a9bbdd3f785e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java @@ -197,7 +197,7 @@ private EPersonRest createAndReturn(Context context, EPersonRest epersonRest, St throw new DSpaceBadRequestException("The self registered property cannot be set to false using this method" + " with a token"); } - checkRequiredProperties(epersonRest); + checkRequiredProperties(registrationData, epersonRest); // We'll turn off authorisation system because this call isn't admin based as it's token based context.turnOffAuthorisationSystem(); EPerson ePerson = createEPersonFromRestObject(context, 
epersonRest); @@ -212,8 +212,8 @@ private EPersonRest createAndReturn(Context context, EPersonRest epersonRest, St return converter.toRest(ePerson, utils.obtainProjection()); } - private void checkRequiredProperties(EPersonRest epersonRest) { - MetadataRest metadataRest = epersonRest.getMetadata(); + private void checkRequiredProperties(RegistrationData registration, EPersonRest epersonRest) { + MetadataRest metadataRest = epersonRest.getMetadata(); if (metadataRest != null) { List epersonFirstName = metadataRest.getMap().get("eperson.firstname"); List epersonLastName = metadataRest.getMap().get("eperson.lastname"); @@ -222,12 +222,27 @@ private void checkRequiredProperties(EPersonRest epersonRest) { throw new EPersonNameNotProvidedException(); } } + String password = epersonRest.getPassword(); - if (StringUtils.isBlank(password)) { - throw new DSpaceBadRequestException("A password is required"); + String netId = epersonRest.getNetid(); + if (StringUtils.isBlank(password) && StringUtils.isBlank(netId)) { + throw new DSpaceBadRequestException( + "You must provide a password or register using an external account" + ); + } + + if (StringUtils.isBlank(password) && !canRegisterExternalAccount(registration, epersonRest)) { + throw new DSpaceBadRequestException( + "Cannot register external account with netId: " + netId + ); } } + private boolean canRegisterExternalAccount(RegistrationData registration, EPersonRest epersonRest) { + return accountService.isTokenValidForCreation(registration) && + StringUtils.equals(registration.getNetId(), epersonRest.getNetid()); + } + @Override @PreAuthorize("hasPermission(#id, 'EPERSON', 'READ')") public EPersonRest findOne(Context context, UUID id) { @@ -305,6 +320,35 @@ public Page findByMetadata(@Parameter(value = "query", required = t } } + /** + * Find the EPersons matching the query parameter which are NOT a member of the given Group. 
+ * The search is delegated to the + * {@link EPersonService#searchNonMembers(Context, String, Group, int, int)} method + * + * @param groupUUID the *required* group UUID to exclude results from + * @param query is the *required* query string + * @param pageable contains the pagination information + * @return a Page of EPersonRest instances matching the user query + */ + @PreAuthorize("hasAuthority('ADMIN') || hasAuthority('MANAGE_ACCESS_GROUP')") + @SearchRestMethod(name = "isNotMemberOf") + public Page findIsNotMemberOf(@Parameter(value = "group", required = true) UUID groupUUID, + @Parameter(value = "query", required = true) String query, + Pageable pageable) { + + try { + Context context = obtainContext(); + Group excludeGroup = groupService.find(context, groupUUID); + long total = es.searchNonMembersCount(context, query, excludeGroup); + List epersons = es.searchNonMembers(context, query, excludeGroup, + Math.toIntExact(pageable.getOffset()), + Math.toIntExact(pageable.getPageSize())); + return converter.toRestPage(epersons, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + @Override @PreAuthorize("hasPermission(#uuid, 'EPERSON', #patch)") protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid, @@ -393,6 +437,30 @@ public EPersonRest joinUserToGroups(UUID uuid, String token) throws AuthorizeExc throw new RuntimeException(e.getMessage()); } } + + public EPersonRest mergeFromRegistrationData( + Context context, UUID uuid, String token, List override + ) throws AuthorizeException { + try { + + if (uuid == null) { + throw new DSpaceBadRequestException("The uuid of the person cannot be null"); + } + + if (token == null) { + throw new DSpaceBadRequestException("You must provide a token for the eperson"); + } + + return converter.toRest( + accountService.mergeRegistration(context, uuid, token, override), + utils.obtainProjection() + 
); + } catch (SQLException e) { + log.error(e); + throw new RuntimeException(e); + } + } + @Override public void afterPropertiesSet() throws Exception { discoverableEndpointsService.register(this, Arrays.asList( diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java index b1cdc401f22f..1ce278893d17 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java @@ -8,6 +8,8 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.List; +import java.util.Set; import java.util.UUID; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; @@ -15,7 +17,9 @@ import org.dspace.app.rest.model.GroupRest; import org.dspace.app.rest.projection.Projection; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; @@ -31,6 +35,9 @@ public class GroupEPersonLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { + @Autowired + EPersonService epersonService; + @Autowired GroupService groupService; @@ -45,7 +52,11 @@ public Page getMembers(@Nullable HttpServletRequest request, if (group == null) { throw new ResourceNotFoundException("No such group: " + groupId); } - return converter.toRestPage(group.getMembers(), optionalPageable, projection); + int total = epersonService.countByGroups(context, Set.of(group)); + Pageable pageable = utils.getPageable(optionalPageable); + List members = epersonService.findByGroups(context, Set.of(group), 
pageable.getPageSize(), + Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(members, pageable, total, projection); } catch (SQLException e) { throw new RuntimeException(e); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java index 37cf9083b39a..564e941d45cc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.List; import java.util.UUID; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; @@ -45,7 +46,11 @@ public Page getGroups(@Nullable HttpServletRequest request, if (group == null) { throw new ResourceNotFoundException("No such group: " + groupId); } - return converter.toRestPage(group.getMemberGroups(), optionalPageable, projection); + int total = groupService.countByParent(context, group); + Pageable pageable = utils.getPageable(optionalPageable); + List memberGroups = groupService.findByParent(context, group, pageable.getPageSize(), + Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(memberGroups, pageable, total, projection); } catch (SQLException e) { throw new RuntimeException(e); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java index 103abdcae645..a3b525387c62 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java @@ -148,6 +148,35 @@ public Page findByMetadata(@Parameter(value = 
"query", required = tru } } + /** + * Find the Groups matching the query parameter which are NOT a member of the given parent Group. + * The search is delegated to the + * {@link GroupService#searchNonMembers(Context, String, Group, int, int)} method + * + * @param groupUUID the parent group UUID + * @param query is the *required* query string + * @param pageable contains the pagination information + * @return a Page of GroupRest instances matching the user query + */ + @PreAuthorize("hasAuthority('ADMIN') || hasAuthority('MANAGE_ACCESS_GROUP')") + @SearchRestMethod(name = "isNotMemberOf") + public Page findIsNotMemberOf(@Parameter(value = "group", required = true) UUID groupUUID, + @Parameter(value = "query", required = true) String query, + Pageable pageable) { + + try { + Context context = obtainContext(); + Group excludeParentGroup = gs.find(context, groupUUID); + long total = gs.searchNonMembersCount(context, query, excludeParentGroup); + List groups = gs.searchNonMembers(context, query, excludeParentGroup, + Math.toIntExact(pageable.getOffset()), + Math.toIntExact(pageable.getPageSize())); + return converter.toRestPage(groups, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + @Override public Class getDomainClass() { return GroupRest.class; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java index b64da66af9cf..8c7e89565371 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java @@ -40,6 +40,7 @@ import org.dspace.app.rest.model.patch.Patch; import org.dspace.app.rest.repository.handler.service.UriListHandlerService; import org.dspace.authorize.AuthorizeException; +import 
org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Item; @@ -106,6 +107,9 @@ public class ItemRestRepository extends DSpaceObjectRestRepository findAll(Context context, Pageable pageable) { protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id, Patch patch) throws AuthorizeException, SQLException { Item item = itemService.find(context, id); - if (!editMetadataFeature.isAuthorized(context, converter.toRest(item, utils.obtainProjection()))) { + if (!authorizeService.isAdmin(context) && + !editMetadataFeature.isAuthorized(context, converter.toRest(item, utils.obtainProjection()))) { throw new AccessDeniedException("Current user not authorized for this operation"); } patchDSpaceObject(apiCategory, model, id, patch); @@ -350,13 +355,13 @@ protected ItemRest put(Context context, HttpServletRequest request, String apiCa } catch (IOException e1) { throw new UnprocessableEntityException("Error parsing request body", e1); } - if (!editMetadataFeature.isAuthorized(context, itemRest)) { - throw new AccessDeniedException("Current user not authorized for this operation"); - } Item item = itemService.find(context, uuid); if (item == null) { throw new ResourceNotFoundException(apiCategory + "." 
+ model + " with id: " + uuid + " not found"); } + if (!authorizeService.isAdmin(context) && !editMetadataFeature.isAuthorized(context, itemRest)) { + throw new AccessDeniedException("Current user not authorized for this operation"); + } if (StringUtils.equals(uuid.toString(), itemRest.getId())) { metadataConverter.setMetadata(context, item, itemRest.getMetadata()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index 157a80e264b5..5152f11902f5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -14,6 +14,7 @@ import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Objects; import javax.servlet.http.HttpServletRequest; @@ -45,10 +46,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; - /** * This is the repository responsible to manage MetadataField Rest object * @@ -135,13 +136,14 @@ public Page findByFieldName(@Parameter(value = "schema", requ @Parameter(value = "exactName", required = false) String exactName, Pageable pageable) throws SQLException { Context context = obtainContext(); + long totalElements = 0; List matchingMetadataFields = new ArrayList<>(); if (StringUtils.isBlank(exactName)) { // Find matches in Solr Search core DiscoverQuery discoverQuery = - this.createDiscoverQuery(context, 
schemaName, elementName, qualifierName, query); + this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query, pageable); try { DiscoverResult searchResult = searchService.search(context, null, discoverQuery); for (IndexableObject object : searchResult.getIndexableObjects()) { @@ -149,6 +151,7 @@ public Page findByFieldName(@Parameter(value = "schema", requ matchingMetadataFields.add(((IndexableMetadataField) object).getIndexedObject()); } } + totalElements = searchResult.getTotalSearchResults(); } catch (SearchServiceException e) { log.error("Error while searching with Discovery", e); throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage()); @@ -163,10 +166,11 @@ public Page findByFieldName(@Parameter(value = "schema", requ MetadataField exactMatchingMdField = metadataFieldService.findByString(context, exactName, '.'); if (exactMatchingMdField != null) { matchingMetadataFields.add(exactMatchingMdField); + totalElements = 1; } } - return converter.toRestPage(matchingMetadataFields, pageable, utils.obtainProjection()); + return converter.toRestPage(matchingMetadataFields, pageable, totalElements, utils.obtainProjection()); } /** @@ -182,7 +186,7 @@ public Page findByFieldName(@Parameter(value = "schema", requ * @throws SQLException If DB error */ private DiscoverQuery createDiscoverQuery(Context context, String schemaName, String elementName, - String qualifierName, String query) throws SQLException { + String qualifierName, String query, Pageable pageable) throws SQLException { List filterQueries = new ArrayList<>(); if (StringUtils.isNotBlank(query)) { if (query.split("\\.").length > 3) { @@ -210,6 +214,15 @@ private DiscoverQuery createDiscoverQuery(Context context, String schemaName, St DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()])); + Iterator orderIterator = pageable.getSort().iterator(); + if 
(orderIterator.hasNext()) { + Sort.Order order = orderIterator.next(); + discoverQuery.setSortField(order.getProperty() + "_sort", + order.getDirection() == Sort.Direction.ASC ? DiscoverQuery.SORT_ORDER.asc : + DiscoverQuery.SORT_ORDER.desc); + } + discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); + discoverQuery.setMaxResults(pageable.getPageSize()); return discoverQuery; } @@ -247,10 +260,18 @@ protected MetadataFieldRest createAndReturn(Context context) if (isBlank(metadataFieldRest.getElement())) { throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); + } else if (!metadataFieldRest.getElement().matches("^[^. ,]{1,64}$")) { + throw new UnprocessableEntityException( + "metadata element (in request body) cannot contain dots, commas or spaces and should be smaller than" + + " 64 characters"); } if (isBlank(metadataFieldRest.getQualifier())) { metadataFieldRest.setQualifier(null); + } else if (!metadataFieldRest.getQualifier().matches("^[^. 
,]{1,64}$")) { + throw new UnprocessableEntityException( + "metadata qualifier (in request body) cannot contain dots, commas or spaces and should be smaller" + + " than 64 characters"); } // create @@ -300,24 +321,26 @@ protected MetadataFieldRest put(Context context, HttpServletRequest request, Str try { metadataFieldRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataFieldRest.class); } catch (JsonProcessingException e) { - throw new UnprocessableEntityException("Cannot parse JSON in request body", e); + throw new DSpaceBadRequestException("Cannot parse JSON in request body", e); } - if (metadataFieldRest == null || isBlank(metadataFieldRest.getElement())) { - throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); + MetadataField metadataField = metadataFieldService.find(context, id); + if (metadataField == null) { + throw new UnprocessableEntityException("metadata field with id: " + id + " not found"); } - if (!Objects.equals(id, metadataFieldRest.getId())) { - throw new UnprocessableEntityException("ID in request body doesn't match path ID"); + if (!Objects.equals(metadataFieldRest.getElement(), metadataField.getElement())) { + throw new UnprocessableEntityException("Metadata element cannot be updated."); } - MetadataField metadataField = metadataFieldService.find(context, id); - if (metadataField == null) { - throw new ResourceNotFoundException("metadata field with id: " + id + " not found"); + if (!Objects.equals(metadataFieldRest.getQualifier(), metadataField.getQualifier())) { + throw new UnprocessableEntityException("Metadata qualifier cannot be updated."); + } + + if (!Objects.equals(id, metadataFieldRest.getId())) { + throw new UnprocessableEntityException("ID in request body doesn't match path ID"); } - metadataField.setElement(metadataFieldRest.getElement()); - metadataField.setQualifier(metadataFieldRest.getQualifier()); metadataField.setScopeNote(metadataFieldRest.getScopeNote()); try { diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java index 2865a2f1dff2..d9c148b71c0d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java @@ -93,6 +93,10 @@ protected MetadataSchemaRest createAndReturn(Context context) // validate fields if (isBlank(metadataSchemaRest.getPrefix())) { throw new UnprocessableEntityException("metadata schema name cannot be blank"); + } else if (!metadataSchemaRest.getPrefix().matches("^[^. ,]{1,32}$")) { + throw new UnprocessableEntityException( + "metadata schema namespace cannot contain dots, commas or spaces and should be smaller than" + + " 32 characters"); } if (isBlank(metadataSchemaRest.getNamespace())) { throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); @@ -142,11 +146,16 @@ protected MetadataSchemaRest put(Context context, HttpServletRequest request, St try { metadataSchemaRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataSchemaRest.class); } catch (JsonProcessingException e) { - throw new UnprocessableEntityException("Cannot parse JSON in request body", e); + throw new DSpaceBadRequestException("Cannot parse JSON in request body", e); } - if (metadataSchemaRest == null || isBlank(metadataSchemaRest.getPrefix())) { - throw new UnprocessableEntityException("metadata schema name cannot be blank"); + MetadataSchema metadataSchema = metadataSchemaService.find(context, id); + if (metadataSchema == null) { + throw new ResourceNotFoundException("metadata schema with id: " + id + " not found"); + } + + if (!Objects.equals(metadataSchemaRest.getPrefix(), metadataSchema.getName())) { + throw new UnprocessableEntityException("Metadata schema name cannot be updated."); } if 
(isBlank(metadataSchemaRest.getNamespace())) { throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); @@ -156,12 +165,6 @@ protected MetadataSchemaRest put(Context context, HttpServletRequest request, St throw new UnprocessableEntityException("ID in request doesn't match path ID"); } - MetadataSchema metadataSchema = metadataSchemaService.find(context, id); - if (metadataSchema == null) { - throw new ResourceNotFoundException("metadata schema with id: " + id + " not found"); - } - - metadataSchema.setName(metadataSchemaRest.getPrefix()); metadataSchema.setNamespace(metadataSchemaRest.getNamespace()); try { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java index 8eb8d7ef652a..16c8115b29f8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java @@ -47,7 +47,7 @@ public class ProcessFileTypesLinkRepository extends AbstractDSpaceRestRepository * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public ProcessFileTypesRest getFileTypesFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java index 3fbd6c9d9163..f36ad4177de5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java @@ -14,10 +14,10 @@ import java.util.List; import java.util.Objects; import java.util.UUID; -import java.util.stream.Collectors; import javax.mail.MessagingException; import javax.servlet.ServletInputStream; import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.BadRequestException; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; @@ -29,6 +29,9 @@ import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.RegistrationRest; +import org.dspace.app.rest.model.patch.Patch; +import org.dspace.app.rest.repository.patch.ResourcePatch; +import org.dspace.app.rest.utils.Utils; import org.dspace.app.util.AuthorizeUtil; import org.dspace.authenticate.service.AuthenticationService; import org.dspace.authorize.AuthorizeException; @@ -39,6 +42,7 @@ import org.dspace.eperson.Group; import org.dspace.eperson.InvalidReCaptchaException; import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; import org.dspace.eperson.service.AccountService; import org.dspace.eperson.service.CaptchaService; import org.dspace.eperson.service.EPersonService; @@ -61,9 +65,10 @@ public class RegistrationRestRepository extends DSpaceRestRepository resourcePatch; + @Override public RegistrationRest findOne(Context context, Integer integer) { throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); @@ -107,7 +118,6 @@ public RegistrationRest createAndReturn(Context context) { HttpServletRequest request = requestService.getCurrentRequest().getHttpServletRequest(); ObjectMapper mapper = new ObjectMapper(); RegistrationRest registrationRest; - String captchaToken = request.getHeader("X-Recaptcha-Token"); boolean verificationEnabled = 
configurationService.getBooleanProperty("registration.verification.enabled"); @@ -132,7 +142,7 @@ public RegistrationRest createAndReturn(Context context) { try { if (Objects.isNull(context.getCurrentUser()) || (!authorizeService.isAdmin(context) - && !hasPermission(context, registrationRest.getGroups()))) { + && !hasPermission(context, registrationRest.getGroups()))) { throw new AccessDeniedException("Only admin users can invite new users to join groups"); } } catch (SQLException e) { @@ -143,7 +153,8 @@ public RegistrationRest createAndReturn(Context context) { if (StringUtils.isBlank(accountType) || (!accountType.equalsIgnoreCase(TYPE_FORGOT) && !accountType.equalsIgnoreCase(TYPE_REGISTER))) { throw new IllegalArgumentException(String.format("Needs query param '%s' with value %s or %s indicating " + - "what kind of registration request it is", TYPE_QUERY_PARAM, TYPE_FORGOT, TYPE_REGISTER)); + "what kind of registration request it is", + TYPE_QUERY_PARAM, TYPE_FORGOT, TYPE_REGISTER)); } EPerson eperson = null; try { @@ -155,32 +166,32 @@ public RegistrationRest createAndReturn(Context context) { try { if (!AuthorizeUtil.authorizeUpdatePassword(context, eperson.getEmail())) { throw new DSpaceBadRequestException("Password cannot be updated for the given EPerson with email: " - + eperson.getEmail()); + + eperson.getEmail()); } accountService.sendForgotPasswordInfo(context, registrationRest.getEmail(), - registrationRest.getGroups()); + registrationRest.getGroups()); } catch (SQLException | IOException | MessagingException | AuthorizeException e) { log.error("Something went wrong with sending forgot password info email: " - + registrationRest.getEmail(), e); + + registrationRest.getEmail(), e); } } else if (accountType.equalsIgnoreCase(TYPE_REGISTER)) { try { String email = registrationRest.getEmail(); if (!AuthorizeUtil.authorizeNewAccountRegistration(context, request)) { throw new AccessDeniedException( - "Registration is disabled, you are not authorized to create 
a new Authorization"); + "Registration is disabled, you are not authorized to create a new Authorization"); } if (!authenticationService.canSelfRegister(context, request, registrationRest.getEmail())) { throw new UnprocessableEntityException( String.format("Registration is not allowed with email address" + - " %s", email)); + " %s", email)); } accountService.sendRegistrationInfo(context, registrationRest.getEmail(), registrationRest.getGroups()); } catch (SQLException | IOException | MessagingException | AuthorizeException e) { log.error("Something went wrong with sending registration info email: " - + registrationRest.getEmail(), e); + + registrationRest.getEmail(), e); } } return null; @@ -201,16 +212,12 @@ private boolean hasPermission(Context context, List groups) throws SQLExce return true; } - @Override - public Class getDomainClass() { - return RegistrationRest.class; - } - /** * This method will find the RegistrationRest object that is associated with the token given + * * @param token The token to be found and for which a RegistrationRest object will be found - * @return A RegistrationRest object for the given token - * @throws SQLException If something goes wrong + * @return A RegistrationRest object for the given token + * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ @SearchRestMethod(name = "findByToken") @@ -221,22 +228,55 @@ public RegistrationRest findByToken(@Parameter(value = "token", required = true) if (registrationData == null) { throw new ResourceNotFoundException("The token: " + token + " couldn't be found"); } - RegistrationRest registrationRest = new RegistrationRest(); - registrationRest.setEmail(registrationData.getEmail()); - EPerson ePerson = accountService.getEPerson(context, token); - if (ePerson != null) { - registrationRest.setUser(ePerson.getID()); + return converter.toRest(registrationData, utils.obtainProjection()); + } + + @Override + public RegistrationRest patch( + 
HttpServletRequest request, String apiCategory, String model, Integer id, Patch patch + ) throws UnprocessableEntityException, DSpaceBadRequestException { + if (id == null || id <= 0) { + throw new BadRequestException("The id of the registration cannot be null or negative"); + } + if (patch == null || patch.getOperations() == null || patch.getOperations().isEmpty()) { + throw new BadRequestException("Patch request is incomplete: cannot find operations"); + } + String token = request.getParameter("token"); + if (token == null || token.trim().isBlank()) { + throw new AccessDeniedException("The token is required"); + } + Context context = obtainContext(); + + validateToken(context, token); + + try { + resourcePatch.patch(context, registrationDataService.find(context, id), patch.getOperations()); + context.commit(); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + return null; + } + + private void validateToken(Context context, String token) { + try { + RegistrationData registrationData = + registrationDataService.findByToken(context, token); + if (registrationData == null || !registrationDataService.isValid(registrationData)) { + throw new AccessDeniedException("The token is invalid"); + } + } catch (SQLException e) { + throw new RuntimeException(e); } - List groupNames = registrationData.getGroups() - .stream().map(Group::getName).collect(Collectors.toList()); - registrationRest.setGroupNames(groupNames); - registrationRest.setGroups(registrationData - .getGroups().stream().map(Group::getID).collect(Collectors.toList())); - return registrationRest; } public void setCaptchaService(CaptchaService captchaService) { this.captchaService = captchaService; } + @Override + public Class getDomainClass() { + return RegistrationRest.class; + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 198d0f765f87..6eb631cfa56e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -17,18 +17,15 @@ import java.sql.SQLException; import java.util.Date; import java.util.UUID; -import javax.annotation.Resource; import javax.servlet.http.HttpServletRequest; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.validator.routines.EmailValidator; import org.apache.http.client.utils.URIBuilder; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.requestitem.RequestItem; -import org.dspace.app.requestitem.RequestItemAuthorExtractor; import org.dspace.app.requestitem.RequestItemEmailNotifier; import org.dspace.app.requestitem.service.RequestItemService; import org.dspace.app.rest.converter.RequestItemConverter; @@ -50,7 +47,7 @@ import org.springframework.data.domain.Pageable; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; - +import org.springframework.web.util.HtmlUtils; /** * Component to expose item requests. 
* @@ -73,12 +70,12 @@ public class RequestItemRepository @Autowired(required = true) protected RequestItemConverter requestItemConverter; - @Resource(name = "requestItemAuthorExtractor") - protected RequestItemAuthorExtractor requestItemAuthorExtractor; - @Autowired(required = true) protected ConfigurationService configurationService; + @Autowired(required = true) + protected RequestItemEmailNotifier requestItemEmailNotifier; + /* * DSpaceRestRepository */ @@ -175,11 +172,11 @@ public RequestItemRest createAndReturn(Context ctx) username = user.getFullName(); } else { // An anonymous session may provide a name. // Escape username to evade nasty XSS attempts - username = StringEscapeUtils.escapeHtml4(rir.getRequestName()); + username = HtmlUtils.htmlEscape(rir.getRequestName(),"UTF-8"); } // Requester's message text, escaped to evade nasty XSS attempts - String message = StringEscapeUtils.escapeHtml4(rir.getRequestMessage()); + String message = HtmlUtils.htmlEscape(rir.getRequestMessage(),"UTF-8"); // Create the request. String token; @@ -203,12 +200,12 @@ public RequestItemRest createAndReturn(Context ctx) // Send the request email try { - RequestItemEmailNotifier.sendRequest(ctx, ri, responseLink); + requestItemEmailNotifier.sendRequest(ctx, ri, responseLink); } catch (IOException | SQLException ex) { throw new RuntimeException("Request not sent.", ex); } - - return requestItemConverter.convert(ri, Projection.DEFAULT); + // #8636 - Security issue: Should not return RequestItemRest to avoid token exposure + return null; } // NOTICE: there is no service method for this -- requests are never deleted? 
@@ -245,7 +242,10 @@ public RequestItemRest put(Context context, HttpServletRequest request, } JsonNode responseMessageNode = requestBody.findValue("responseMessage"); - String message = responseMessageNode.asText(); + String message = null; + if (responseMessageNode != null && !responseMessageNode.isNull()) { + message = responseMessageNode.asText(); + } ri.setDecision_date(new Date()); requestItemService.update(context, ri); @@ -253,7 +253,7 @@ public RequestItemRest put(Context context, HttpServletRequest request, // Send the response email String subject = requestBody.findValue("subject").asText(); try { - RequestItemEmailNotifier.sendResponse(context, ri, subject, message); + requestItemEmailNotifier.sendResponse(context, ri, subject, message); } catch (IOException ex) { LOG.warn("Response not sent: {}", ex::getMessage); throw new RuntimeException("Response not sent", ex); @@ -262,7 +262,7 @@ public RequestItemRest put(Context context, HttpServletRequest request, // Perhaps send Open Access request to admin.s. 
if (requestBody.findValue("suggestOpenAccess").asBoolean(false)) { try { - RequestItemEmailNotifier.requestOpenAccess(context, ri); + requestItemEmailNotifier.requestOpenAccess(context, ri); } catch (IOException ex) { LOG.warn("Open access request not sent: {}", ex::getMessage); throw new RuntimeException("Open access request not sent", ex); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java index 0b77f96b9b5f..72ca3f254256 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java @@ -30,7 +30,9 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; import org.dspace.content.DSpaceObject; +import org.dspace.content.service.BitstreamService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.EPerson; @@ -76,6 +78,9 @@ public class ResourcePolicyRestRepository extends DSpaceRestRepository dSpaceCommandLineParameters = processPropertiesToDSpaceCommandLineParameters(properties); ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + if (scriptToExecute == null) { - throw new DSpaceBadRequestException("The script for name: " + scriptName + " wasn't found"); + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); } - if (!scriptToExecute.isAllowedToExecute(context)) { - throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName); + try { + if (!scriptToExecute.isAllowedToExecute(context, dSpaceCommandLineParameters)) { + throw new AuthorizeException("Current 
user is not eligible to execute script with name: " + scriptName + + " and the specified parameters " + StringUtils.join(dSpaceCommandLineParameters, ", ")); + } + } catch (IllegalArgumentException e) { + throw new DSpaceBadRequestException("Illegal argoument " + e.getMessage(), e); } EPerson user = context.getCurrentUser(); RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler(user, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java index 0da217f3812c..b9f69c25e0f3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java @@ -17,12 +17,13 @@ import org.dspace.app.rest.SearchRestMethod; import org.dspace.app.rest.model.SubmissionDefinitionRest; import org.dspace.app.util.SubmissionConfig; -import org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.content.Collection; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.security.access.prepost.PreAuthorize; @@ -35,18 +36,18 @@ */ @Component(SubmissionDefinitionRest.CATEGORY + "." 
+ SubmissionDefinitionRest.NAME) public class SubmissionDefinitionRestRepository extends DSpaceRestRepository { - private SubmissionConfigReader submissionConfigReader; + private SubmissionConfigService submissionConfigService; private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); public SubmissionDefinitionRestRepository() throws SubmissionConfigReaderException { - submissionConfigReader = new SubmissionConfigReader(); + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); } @PreAuthorize("hasAuthority('AUTHENTICATED')") @Override public SubmissionDefinitionRest findOne(Context context, String submitName) { - SubmissionConfig subConfig = submissionConfigReader.getSubmissionConfigByName(submitName); + SubmissionConfig subConfig = submissionConfigService.getSubmissionConfigByName(submitName); if (subConfig == null) { return null; } @@ -56,8 +57,8 @@ public SubmissionDefinitionRest findOne(Context context, String submitName) { @PreAuthorize("hasAuthority('AUTHENTICATED')") @Override public Page findAll(Context context, Pageable pageable) { - int total = submissionConfigReader.countSubmissionConfigs(); - List subConfs = submissionConfigReader.getAllSubmissionConfigs( + int total = submissionConfigService.countSubmissionConfigs(); + List subConfs = submissionConfigService.getAllSubmissionConfigs( pageable.getPageSize(), Math.toIntExact(pageable.getOffset())); subConfs = subConfs.stream() @@ -75,13 +76,10 @@ public SubmissionDefinitionRest findByCollection(@Parameter(value = "uuid", requ if (col == null) { return null; } - - SubmissionConfig submissionConfig = submissionConfigReader.getSubmissionConfigByCollection(col); - if (submissionConfig == null) { - return null; - } - - return converter.toRest(submissionConfig, utils.obtainProjection()); + SubmissionDefinitionRest def = converter + .toRest(submissionConfigService.getSubmissionConfigByCollection(col), + 
utils.obtainProjection()); + return def; } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionPanelRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionPanelRestRepository.java index 2046a816eb0a..62d104c0a6d3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionPanelRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionPanelRestRepository.java @@ -13,10 +13,11 @@ import org.dspace.app.rest.model.SubmissionDefinitionRest; import org.dspace.app.rest.model.SubmissionSectionRest; import org.dspace.app.util.SubmissionConfig; -import org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.app.util.SubmissionStepConfig; import org.dspace.core.Context; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.security.access.prepost.PreAuthorize; @@ -30,17 +31,17 @@ @Component(SubmissionDefinitionRest.CATEGORY + "." 
+ SubmissionSectionRest.NAME) public class SubmissionPanelRestRepository extends DSpaceRestRepository { - private SubmissionConfigReader submissionConfigReader; + private SubmissionConfigService submissionConfigService; public SubmissionPanelRestRepository() throws SubmissionConfigReaderException { - submissionConfigReader = new SubmissionConfigReader(); + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); } @PreAuthorize("hasAuthority('AUTHENTICATED')") @Override public SubmissionSectionRest findOne(Context context, String id) { try { - SubmissionStepConfig step = submissionConfigReader.getStepConfig(id); + SubmissionStepConfig step = submissionConfigService.getStepConfig(id); return converter.toRest(step, utils.obtainProjection()); } catch (SubmissionConfigReaderException e) { //TODO wrap with a specific exception @@ -51,7 +52,7 @@ public SubmissionSectionRest findOne(Context context, String id) { @PreAuthorize("hasAuthority('AUTHENTICATED')") @Override public Page findAll(Context context, Pageable pageable) { - List subConfs = submissionConfigReader.getAllSubmissionConfigs( + List subConfs = submissionConfigService.getAllSubmissionConfigs( pageable.getPageSize(), Math.toIntExact(pageable.getOffset())); long total = 0; List stepConfs = new ArrayList<>(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java index 4571b86d89cf..eadbbdf21b6e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java @@ -10,6 +10,7 @@ import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; +import java.util.Date; import java.util.List; import org.apache.commons.lang3.StringUtils; @@ 
-19,11 +20,11 @@ import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.utils.Utils; import org.dspace.core.Context; -import org.dspace.eperson.service.GroupService; import org.dspace.submit.model.AccessConditionOption; import org.dspace.submit.model.UploadConfiguration; import org.dspace.submit.model.UploadConfigurationService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -51,11 +52,6 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { Collection uploadConfigs = uploadConfigurationService.getMap().values(); Projection projection = utils.obtainProjection(); List results = new ArrayList<>(); - List configNames = new ArrayList(); + List configNames = new ArrayList<>(); for (UploadConfiguration uploadConfig : uploadConfigs) { if (!configNames.contains(uploadConfig.getName())) { configNames.add(uploadConfig.getName()); @@ -96,13 +92,15 @@ public Class getDomainClass() { private SubmissionUploadRest convert(Context context, UploadConfiguration config, Projection projection) { SubmissionUploadRest result = new SubmissionUploadRest(); result.setProjection(projection); + DateMathParser dateMathParser = new DateMathParser(); for (AccessConditionOption option : config.getOptions()) { AccessConditionOptionRest optionRest = new AccessConditionOptionRest(); optionRest.setHasStartDate(option.getHasStartDate()); optionRest.setHasEndDate(option.getHasEndDate()); if (StringUtils.isNotBlank(option.getStartDateLimit())) { try { - optionRest.setMaxStartDate(dateMathParser.parseMath(option.getStartDateLimit())); + Date requested = dateMathParser.parseMath(option.getStartDateLimit()); + optionRest.setMaxStartDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new 
IllegalStateException("Wrong start date limit configuration for the access condition " + "option named " + option.getName()); @@ -110,7 +108,8 @@ private SubmissionUploadRest convert(Context context, UploadConfiguration config } if (StringUtils.isNotBlank(option.getEndDateLimit())) { try { - optionRest.setMaxEndDate(dateMathParser.parseMath(option.getEndDateLimit())); + Date requested = dateMathParser.parseMath(option.getEndDateLimit()); + optionRest.setMaxEndDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new IllegalStateException("Wrong end date limit configuration for the access condition " + "option named " + option.getName()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java index e4214a4c9208..8755f10813ed 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java @@ -12,6 +12,7 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.UUID; import javax.servlet.ServletInputStream; import javax.servlet.http.HttpServletRequest; @@ -21,7 +22,6 @@ import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.ViewEventRest; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DSpaceObject; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Constants; @@ -51,7 +51,8 @@ public ViewEventRest createViewEvent() throws AuthorizeException, SQLException { } catch (IOException e1) { throw new UnprocessableEntityException("Error parsing request body", e1); } - if (viewEventRest.getTargetId() == null || StringUtils.isBlank(viewEventRest.getTargetType()) || + final 
UUID targetId = viewEventRest.getTargetId(); + if (targetId == null || StringUtils.isBlank(viewEventRest.getTargetType()) || !typeList.contains(viewEventRest.getTargetType().toUpperCase())) { throw new DSpaceBadRequestException("The given ViewEvent was invalid, one or more properties are either" + " wrong or missing"); @@ -59,13 +60,14 @@ public ViewEventRest createViewEvent() throws AuthorizeException, SQLException { DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance().getDSpaceObjectService( Constants.getTypeID(viewEventRest.getTargetType().toUpperCase(Locale.getDefault()))); - DSpaceObject dSpaceObject = dSpaceObjectService.find(context, viewEventRest.getTargetId()); - if (dSpaceObject == null) { + if (!dSpaceObjectService.exists(context, targetId)) { throw new UnprocessableEntityException( - "The given targetId does not resolve to a DSpaceObject: " + viewEventRest.getTargetId()); + "The given targetId does not resolve to a DSpaceObject: " + targetId); } - UsageEvent usageEvent = new UsageEvent(UsageEvent.Action.VIEW, req, context, dSpaceObject); - eventService.fireEvent(usageEvent); + final String referrer = viewEventRest.getReferrer(); + eventService.fireAsyncEvent( + () -> UsageEvent.createUsageEvent(context, req, dSpaceObjectService, targetId, referrer) + ); return viewEventRest; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkflowItemRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkflowItemRestRepository.java index ee8dc12e7354..de39ff69fb9c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkflowItemRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkflowItemRestRepository.java @@ -27,7 +27,6 @@ import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.Patch; import org.dspace.app.rest.submit.SubmissionService; -import 
org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; @@ -40,6 +39,8 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.EPersonServiceImpl; import org.dspace.services.ConfigurationService; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.dspace.workflow.WorkflowException; import org.dspace.workflow.WorkflowService; import org.dspace.xmlworkflow.WorkflowConfigurationException; @@ -109,10 +110,10 @@ public class WorkflowItemRestRepository extends DSpaceRestRepository upload(Context context, HttpServletRequest re collection = collectionService.findAuthorizedOptimized(context, Constants.ADD).get(0); } - SubmissionConfig submissionConfig = submissionConfigReader.getSubmissionConfigByCollection(collection); + SubmissionConfig submissionConfig = + submissionConfigService.getSubmissionConfigByCollection(collection); List result = null; List records = new ArrayList<>(); try { for (MultipartFile mpFile : uploadfiles) { File file = Utils.getFile(mpFile, "upload-loader", "filedataloader"); try { - ImportRecord record = importService.getRecord(file, mpFile.getOriginalFilename()); - if (record != null) { - records.add(record); + List recordsFound = importService.getRecords(file, mpFile.getOriginalFilename()); + if (recordsFound != null && !recordsFound.isEmpty()) { + records.addAll(recordsFound); break; } } catch (Exception e) { @@ -334,11 +336,15 @@ public Iterable upload(Context context, HttpServletRequest re } catch (Exception e) { log.error("Error importing metadata", e); } - WorkspaceItem source = submissionService. 
- createWorkspaceItem(context, getRequestService().getCurrentRequest()); - merge(context, records, source); - result = new ArrayList<>(); - result.add(source); + result = new ArrayList<>(records.size()); + for (ImportRecord importRecord : records) { + WorkspaceItem source = submissionService. + createWorkspaceItem(context, getRequestService().getCurrentRequest()); + + merge(context, importRecord, source); + + result.add(source); + } //perform upload of bitstream if there is exact one result and convert workspaceitem to entity rest if (!result.isEmpty()) { @@ -348,18 +354,17 @@ public Iterable upload(Context context, HttpServletRequest re //load bitstream into bundle ORIGINAL only if there is one result (approximately this is the // right behaviour for pdf file but not for other bibliographic format e.g. bibtex) if (result.size() == 1) { + ClassLoader loader = this.getClass().getClassLoader(); for (int i = 0; i < submissionConfig.getNumberOfSteps(); i++) { SubmissionStepConfig stepConfig = submissionConfig.getStep(i); - ClassLoader loader = this.getClass().getClassLoader(); - Class stepClass; try { - stepClass = loader.loadClass(stepConfig.getProcessingClassName()); - Object stepInstance = stepClass.newInstance(); + Class stepClass = loader.loadClass(stepConfig.getProcessingClassName()); + Object stepInstance = stepClass.getConstructor().newInstance(); if (UploadableStep.class.isAssignableFrom(stepClass)) { UploadableStep uploadableStep = (UploadableStep) stepInstance; for (MultipartFile mpFile : uploadfiles) { - ErrorRest err = uploadableStep.upload(context, - submissionService, stepConfig, wi, mpFile); + ErrorRest err = + uploadableStep.upload(context, submissionService, stepConfig, wi, mpFile); if (err != null) { errors.add(err); } @@ -449,7 +454,7 @@ private BaseObjectRest findItemRestById(Context context, String itemId) throw return authorizationRestUtil.getObject(context, objectId); } - private void merge(Context context, List records, WorkspaceItem item) 
throws SQLException { + private void merge(Context context, ImportRecord record, WorkspaceItem item) throws SQLException { for (MetadataValue metadataValue : itemService.getMetadata( item.getItem(), Item.ANY, Item.ANY, Item.ANY, Item.ANY)) { itemService.clearMetadata(context, item.getItem(), @@ -458,13 +463,11 @@ private void merge(Context context, List records, WorkspaceItem it metadataValue.getMetadataField().getQualifier(), metadataValue.getLanguage()); } - for (ImportRecord record : records) { - if (record != null && record.getValueList() != null) { - for (MetadatumDTO metadataValue : record.getValueList()) { - itemService.addMetadata(context, item.getItem(), metadataValue.getSchema(), - metadataValue.getElement(), metadataValue.getQualifier(), null, - metadataValue.getValue()); - } + if (record != null && record.getValueList() != null) { + for (MetadatumDTO metadataValue : record.getValueList()) { + itemService.addMetadata(context, item.getItem(), metadataValue.getSchema(), + metadataValue.getElement(), metadataValue.getQualifier(), null, + metadataValue.getValue()); } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java index d619100bf67a..201a7ba1633d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java @@ -30,16 +30,19 @@ @Component public class ExternalSourceItemUriListHandler extends ExternalSourceEntryItemUriListHandler { + private Pattern pattern = Pattern.compile("\\/api\\/core\\/items\\/(.*)"); + @Autowired private ItemService itemService; @Override @SuppressWarnings("rawtypes") public boolean supports(List uriList, String method,Class clazz) { - if (clazz != Item.class) { + if 
(clazz != Item.class || uriList.size() != 1) { return false; } - return true; + + return pattern.matcher(uriList.get(0)).find(); } @Override @@ -61,7 +64,6 @@ public boolean validate(Context context, HttpServletRequest request, List uriList) { Item item = null; String url = uriList.get(0); - Pattern pattern = Pattern.compile("\\/api\\/core\\/items\\/(.*)"); Matcher matcher = pattern.matcher(url); if (!matcher.find()) { throw new DSpaceBadRequestException("The uri: " + url + " doesn't resolve to an item"); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java new file mode 100644 index 000000000000..b0e2a45c9d23 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.app.rest.exception.RESTBitstreamNotFoundException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Bitstream; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.AccessDeniedException; +import org.springframework.stereotype.Component; + +/** + * A PATCH operation for removing bitstreams in bulk from the repository. 
+ * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json" + * -d '[ + * {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"} + * ]' + * + * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@Component +public class BitstreamRemoveOperation extends PatchOperation { + @Autowired + BitstreamService bitstreamService; + @Autowired + AuthorizeService authorizeService; + public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; + + @Override + public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException { + String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, ""); + Bitstream bitstreamToDelete = bitstreamService.find(context, UUID.fromString(bitstreamIDtoDelete)); + if (bitstreamToDelete == null) { + throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); + } + authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE); + + try { + bitstreamService.delete(context, bitstreamToDelete); + } catch (AuthorizeException | IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + return null; + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) && + operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); + } + + public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) + throws SQLException { + try { + authorizeService.authorizeAction(context, bitstream, operation); + } catch (AuthorizeException e) { + throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e); + } + } +} diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/RegistrationEmailPatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/RegistrationEmailPatchOperation.java new file mode 100644 index 000000000000..e4bbd45a3f34 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/RegistrationEmailPatchOperation.java @@ -0,0 +1,166 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation; + +import java.sql.SQLException; +import java.text.MessageFormat; +import java.util.Optional; + +import com.fasterxml.jackson.databind.JsonNode; +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.model.patch.JsonValueEvaluator; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; +import org.dspace.eperson.dto.RegistrationDataChanges; +import org.dspace.eperson.dto.RegistrationDataPatch; +import org.dspace.eperson.service.AccountService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Implementation for RegistrationData email patches. 
+ * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/eperson/registration/<:registration-id>?token=<:token> -H " + * Content-Type: application/json" -d '[{ "op": "replace", "path": "/email", "value": "new@email"]' + * + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +@Component +public class RegistrationEmailPatchOperation extends PatchOperation { + + /** + * Path in json body of patch that uses this operation + */ + private static final String OPERATION_PATH_EMAIL = "/email"; + + @Autowired + private AccountService accountService; + + @Override + public R perform(Context context, R object, Operation operation) { + checkOperationValue(operation.getValue()); + + RegistrationDataPatch registrationDataPatch; + try { + String email = getTextValue(operation); + registrationDataPatch = + new RegistrationDataPatch( + object, + new RegistrationDataChanges( + email, + registrationTypeFor(context, object, email) + ) + ); + } catch (IllegalArgumentException e) { + throw new UnprocessableEntityException( + "Cannot perform the patch operation", + e + ); + } catch (SQLException e) { + throw new RuntimeException(e); + } + + if (!supports(object, operation)) { + throw new UnprocessableEntityException( + MessageFormat.format( + "RegistrationEmailReplaceOperation does not support {0} operation", + operation.getOp() + ) + ); + } + + if (!isOperationAllowed(operation, object)) { + throw new UnprocessableEntityException( + MessageFormat.format( + "Attempting to perform {0} operation over {1} value (e-mail).", + operation.getOp(), + object.getEmail() == null ? "null" : "not null" + ) + ); + } + + + try { + return (R) accountService.renewRegistrationForEmail(context, registrationDataPatch); + } catch (AuthorizeException e) { + throw new DSpaceBadRequestException( + MessageFormat.format( + "Cannot perform {0} operation over {1} value (e-mail).", + operation.getOp(), + object.getEmail() == null ? 
"null" : "not null" + ), + e + ); + } + } + + private static String getTextValue(Operation operation) { + Object value = operation.getValue(); + + if (value instanceof String) { + return ((String) value); + } + + if (value instanceof JsonValueEvaluator) { + return Optional.of((JsonValueEvaluator) value) + .map(JsonValueEvaluator::getValueNode) + .filter(nodes -> !nodes.isEmpty()) + .map(nodes -> nodes.get(0)) + .map(JsonNode::asText) + .orElseThrow(() -> new DSpaceBadRequestException("No value provided for operation")); + } + throw new DSpaceBadRequestException("Invalid patch value for operation!"); + } + + private RegistrationTypeEnum registrationTypeFor( + Context context, R object, String email + ) + throws SQLException { + if (accountService.existsAccountWithEmail(context, email)) { + return RegistrationTypeEnum.VALIDATION_ORCID; + } + return object.getRegistrationType(); + } + + + /** + * Checks whether the email of RegistrationData has an existing value to replace or adds a new value. 
+ * + * @param operation operation to check + * @param registrationData Object on which patch is being done + */ + private boolean isOperationAllowed(Operation operation, RegistrationData registrationData) { + return isReplaceOperationAllowed(operation, registrationData) || + isAddOperationAllowed(operation, registrationData); + } + + private boolean isAddOperationAllowed(Operation operation, RegistrationData registrationData) { + return operation.getOp().trim().equalsIgnoreCase(OPERATION_ADD) && registrationData.getEmail() == null; + } + + private static boolean isReplaceOperationAllowed(Operation operation, RegistrationData registrationData) { + return operation.getOp().trim().equalsIgnoreCase(OPERATION_REPLACE) && registrationData.getEmail() != null; + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return (objectToMatch instanceof RegistrationData && + ( + operation.getOp().trim().equalsIgnoreCase(OPERATION_REPLACE) || + operation.getOp().trim().equalsIgnoreCase(OPERATION_ADD) + ) && + operation.getPath().trim().equalsIgnoreCase(OPERATION_PATH_EMAIL)); + } +} + diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java index b06637bad240..0d426c96d06a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import 
org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -58,11 +58,10 @@ public R perform(Context context, R resource, Operation operation) { */ private void add(ResourcePolicy resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setEndDate(date); - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid endDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + resourcePolicy.setEndDate(date); + if (date == null) { + throw new DSpaceBadRequestException("Invalid endDate value " + dateS); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java index a71224ea294d..fc4e7a63ca87 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -58,12 +58,11 @@ public R perform(Context context, R resource, Operation operation) { */ private void replace(ResourcePolicy resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setEndDate(date); - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid endDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid endDate value " + dateS); } + resourcePolicy.setEndDate(date); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java index f8f74b65868d..f19d7043cf7d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -59,12 +59,11 @@ public R perform(Context context, R resource, Operation operation) { */ private void add(ResourcePolicy 
resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setStartDate(date); - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid startDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid startDate value " + dateS); } + resourcePolicy.setStartDate(date); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java index a6812f658132..2d1425341071 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -58,12 +58,11 @@ public R perform(Context context, R resource, Operation operation) { */ private void replace(ResourcePolicy resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setStartDate(date); - } catch 
(ParseException e) { - throw new DSpaceBadRequestException("Invalid startDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid startDate value " + dateS); } + resourcePolicy.setStartDate(date); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java index 435480e318ef..7718260be7c4 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java @@ -7,13 +7,12 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; -import java.text.SimpleDateFormat; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.model.patch.Operation; import org.dspace.authorize.ResourcePolicy; +import org.dspace.util.MultiFormatDateParser; import org.springframework.stereotype.Component; /** @@ -25,8 +24,6 @@ @Component public class ResourcePolicyUtils { - public static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); - /** * Paths in json body of patched that use these resourcePolicy operations */ @@ -112,13 +109,12 @@ public void checkResourcePolicyForExistingDescriptionValue(ResourcePolicy resour */ public void checkResourcePolicyForConsistentStartDateValue(ResourcePolicy resource, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = simpleDateFormat.parse(dateS); - if (resource.getEndDate() != null && resource.getEndDate().before(date)) { - throw new DSpaceBadRequestException("Attempting to set an invalid startDate 
greater than the endDate."); - } - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid startDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid startDate value " + dateS); + } + if (resource.getEndDate() != null && resource.getEndDate().before(date)) { + throw new DSpaceBadRequestException("Attempting to set an invalid startDate greater than the endDate."); } } @@ -134,13 +130,12 @@ public void checkResourcePolicyForConsistentStartDateValue(ResourcePolicy resour */ public void checkResourcePolicyForConsistentEndDateValue(ResourcePolicy resource, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = simpleDateFormat.parse(dateS); - if (resource.getStartDate() != null && resource.getStartDate().after(date)) { - throw new DSpaceBadRequestException("Attempting to set an invalid endDate smaller than the startDate."); - } - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid endDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid endDate value " + dateS); + } + if (resource.getStartDate() != null && resource.getStartDate().after(date)) { + throw new DSpaceBadRequestException("Attempting to set an invalid endDate smaller than the startDate."); } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java index 0d251f6400f7..338eed4a7340 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java @@ -20,6 +20,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.Ordered; +import org.springframework.core.annotation.Order; import org.springframework.security.core.Authentication; import org.springframework.stereotype.Component; @@ -29,6 +31,7 @@ * the authenticated EPerson is allowed to perform the requested action. */ @Component +@Order(value = Ordered.HIGHEST_PRECEDENCE) public class AdminRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin { private static final Logger log = LoggerFactory.getLogger(RestObjectPermissionEvaluatorPlugin.class); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/OrcidLoginFilter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/OrcidLoginFilter.java index 9fdef6b050f7..0a50fec20803 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/OrcidLoginFilter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/OrcidLoginFilter.java @@ -7,7 +7,12 @@ */ package org.dspace.app.rest.security; +import static org.dspace.authenticate.OrcidAuthenticationBean.ORCID_AUTH_ATTRIBUTE; +import static org.dspace.authenticate.OrcidAuthenticationBean.ORCID_DEFAULT_REGISTRATION_URL; +import static org.dspace.authenticate.OrcidAuthenticationBean.ORCID_REGISTRATION_TOKEN; + import java.io.IOException; +import java.text.MessageFormat; import java.util.ArrayList; import javax.servlet.FilterChain; import javax.servlet.ServletException; @@ -43,10 +48,11 @@ public class OrcidLoginFilter extends StatelessLoginFilter { private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); private OrcidAuthenticationBean orcidAuthentication = new DSpace().getServiceManager() - .getServiceByName("orcidAuthentication", OrcidAuthenticationBean.class); + .getServiceByName("orcidAuthentication", + OrcidAuthenticationBean.class); public OrcidLoginFilter(String url, AuthenticationManager 
authenticationManager, - RestAuthenticationService restAuthenticationService) { + RestAuthenticationService restAuthenticationService) { super(url, authenticationManager, restAuthenticationService); } @@ -64,13 +70,13 @@ public Authentication attemptAuthentication(HttpServletRequest req, HttpServletR @Override protected void successfulAuthentication(HttpServletRequest req, HttpServletResponse res, FilterChain chain, - Authentication auth) throws IOException, ServletException { + Authentication auth) throws IOException, ServletException { DSpaceAuthentication dSpaceAuthentication = (DSpaceAuthentication) auth; log.debug("Orcid authentication successful for EPerson {}. Sending back temporary auth cookie", - dSpaceAuthentication.getName()); + dSpaceAuthentication.getName()); restAuthenticationService.addAuthenticationDataForUser(req, res, dSpaceAuthentication, true); @@ -79,26 +85,41 @@ protected void successfulAuthentication(HttpServletRequest req, HttpServletRespo @Override protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, - AuthenticationException failed) throws IOException, ServletException { + AuthenticationException failed) throws IOException, ServletException { Context context = ContextUtil.obtainContext(request); - if (orcidAuthentication.isUsed(context, request)) { - String baseRediredirectUrl = configurationService.getProperty("dspace.ui.url"); - String redirectUrl = baseRediredirectUrl + "/error?status=401&code=orcid.generic-error"; - response.sendRedirect(redirectUrl); // lgtm [java/unvalidated-url-redirection] - } else { + if (!orcidAuthentication.isUsed(context, request)) { super.unsuccessfulAuthentication(request, response, failed); + return; + } + + String baseRediredirectUrl = configurationService.getProperty("dspace.ui.url"); + String redirectUrl = baseRediredirectUrl + "/error?status=401&code=orcid.generic-error"; + Object registrationToken = request.getAttribute(ORCID_REGISTRATION_TOKEN); + if 
(registrationToken != null) { + final String orcidRegistrationDataUrl = + configurationService.getProperty("orcid.registration-data.url", ORCID_DEFAULT_REGISTRATION_URL); + redirectUrl = baseRediredirectUrl + MessageFormat.format(orcidRegistrationDataUrl, registrationToken); + if (log.isDebugEnabled()) { + log.debug( + "Orcid authentication failed for user with ORCID {}.", + request.getAttribute(ORCID_AUTH_ATTRIBUTE) + ); + log.debug("Redirecting to {} for registration completion.", redirectUrl); + } } + response.sendRedirect(redirectUrl); // lgtm [java/unvalidated-url-redirection] } /** * After successful login, redirect to the DSpace URL specified by this Orcid * request (in the "redirectUrl" request parameter). If that 'redirectUrl' is * not valid or trusted for this DSpace site, then return a 400 error. - * @param request - * @param response + * + * @param request + * @param response * @throws IOException */ private void redirectAfterSuccess(HttpServletRequest request, HttpServletResponse response) throws IOException { @@ -126,9 +147,9 @@ private void redirectAfterSuccess(HttpServletRequest request, HttpServletRespons response.sendRedirect(redirectUrl); } else { log.error("Invalid Orcid redirectURL=" + redirectUrl + - ". URL doesn't match hostname of server or UI!"); + ". URL doesn't match hostname of server or UI!"); response.sendError(HttpServletResponse.SC_BAD_REQUEST, - "Invalid redirectURL! Must match server or ui hostname."); + "Invalid redirectURL! 
Must match server or ui hostname."); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java new file mode 100644 index 000000000000..cb977dff3aef --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.security; + +import java.io.Serializable; +import java.sql.SQLException; +import java.util.UUID; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.model.TemplateItemRest; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.services.RequestService; +import org.dspace.services.model.Request; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.core.Authentication; +import org.springframework.stereotype.Component; + +/** + * {@link RestObjectPermissionEvaluatorPlugin} class that evaluate WRITE and DELETE permission over a TemplateItem + * + * @author Bui Thai Hai (thaihai.bui@dlcorp.com.vn) + */ +@Component +public class TemplateItemRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin { + + private static final Logger log = LoggerFactory.getLogger(TemplateItemRestPermissionEvaluatorPlugin.class); + + @Autowired + private RequestService requestService; + + @Autowired + 
ItemService its; + + @Autowired + private AuthorizeService authorizeService; + + @Override + public boolean hasDSpacePermission(Authentication authentication, Serializable targetId, String targetType, + DSpaceRestPermission permission) { + + DSpaceRestPermission restPermission = DSpaceRestPermission.convert(permission); + if (!DSpaceRestPermission.WRITE.equals(restPermission) && + !DSpaceRestPermission.DELETE.equals(restPermission)) { + return false; + } + if (!StringUtils.equalsIgnoreCase(targetType, TemplateItemRest.NAME)) { + return false; + } + + Request request = requestService.getCurrentRequest(); + Context context = ContextUtil.obtainContext(request.getHttpServletRequest()); + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return false; + } + // Allow collection's admin to edit/delete the template + + UUID dsoId = UUID.fromString(targetId.toString()); + requestService.getCurrentRequest().getHttpServletRequest().getRequestURL(); + try { + Collection coll = its.find(context, dsoId).getTemplateItemOf(); + if (authorizeService.isAdmin(context, coll)) { + return true; + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + return false; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java index c385d8b1be03..bf91cad5540e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java @@ -91,7 +91,7 @@ protected void configure(HttpSecurity http) throws Exception { // Configure authentication requirements for ${dspace.server.url}/api/ URL only // NOTE: REST API is hardcoded to respond on /api/. Other modules (OAI, SWORD, IIIF, etc) use other root paths. 
http.requestMatchers() - .antMatchers("/api/**", "/iiif/**", actuatorBasePath + "/**") + .antMatchers("/api/**", "/iiif/**", actuatorBasePath + "/**", "/signposting/**") .and() // Enable Spring Security authorization on these paths .authorizeRequests() diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java new file mode 100644 index 000000000000..2a940d79aba4 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -0,0 +1,194 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.controller; + +import static java.lang.String.format; +import static java.util.Objects.isNull; +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.signposting.converter.LinksetRestMessageConverter; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.dspace.app.rest.signposting.model.TypedLinkRest; +import org.dspace.app.rest.signposting.service.LinksetService; +import org.dspace.app.rest.signposting.utils.LinksetMapper; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.rest.utils.Utils; +import 
org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.DisseminationCrosswalk; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.core.service.PluginService; +import org.dspace.services.ConfigurationService; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.AccessDeniedException; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * This RestController takes care of the retrieval of {@link LinksetRest}. + * This class will receive the UUID of an {@link Item} or {@link Bitstream}. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@RestController +@RequestMapping("/${signposting.path:signposting}") +@ConditionalOnProperty("signposting.enabled") +public class LinksetRestController { + + @Autowired + private Utils utils; + @Autowired + private BitstreamService bitstreamService; + @Autowired + private ItemService itemService; + @Autowired + private ConverterService converter; + @Autowired + private LinksetService linksetService; + @Autowired + private ConfigurationService configurationService; + private final PluginService pluginService = CoreServiceFactory.getInstance().getPluginService(); + + @PreAuthorize("permitAll()") + @RequestMapping(method = RequestMethod.GET) + public ResponseEntity getAll() { + return ResponseEntity.status(HttpStatus.METHOD_NOT_ALLOWED).build(); + } + + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") + @RequestMapping( + value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/json", + method = RequestMethod.GET, + produces = "application/linkset+json" + ) + public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) { + try { + Context context = ContextUtil.obtainContext(request); + + Item item = itemService.find(context, uuid); + if (item == null) { + throw new ResourceNotFoundException("No such Item: " + uuid); + } + verifyItemIsDiscoverable(item); + List> linksetNodes = linksetService + .createLinksetNodesForMultipleLinksets(request, context, item); + List linksets = linksetNodes.stream().map(LinksetMapper::map).collect(Collectors.toList()); + return converter.toRest(linksets, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") + @RequestMapping( + value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, + method = RequestMethod.GET, + produces = "application/linkset" + ) + public String getLset(HttpServletRequest request, 
@PathVariable UUID uuid) { + try { + Context context = ContextUtil.obtainContext(request); + Item item = itemService.find(context, uuid); + if (item == null) { + throw new ResourceNotFoundException("No such Item: " + uuid); + } + verifyItemIsDiscoverable(item); + List> linksetNodes = linksetService + .createLinksetNodesForMultipleLinksets(request, context, item); + return LinksetRestMessageConverter.convert(linksetNodes); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + // In @PreAuthorize(...) we're using "&&" (and) instead of "||" (or) because if hasPermission() is unable + // to find object of specified type with specified uuid it returns "true". + // For example: if we pass uuid of Bitstream: hasPermission(#uuid, 'ITEM', 'READ') returns "true", because + // it will use ItemService with uuid of bitstream. + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ') && hasPermission(#uuid, 'BITSTREAM', 'READ')") + @RequestMapping(value = "/links" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) + public List getHeader(HttpServletRequest request, @PathVariable UUID uuid) { + Context context = ContextUtil.obtainContext(request); + DSpaceObject dso = findObject(context, uuid); + List linksetNodes = linksetService.createLinksetNodesForSingleLinkset(request, context, dso); + return linksetNodes.stream() + .map(node -> new TypedLinkRest(node.getLink(), node.getRelation(), node.getType())) + .collect(Collectors.toList()); + } + + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") + @RequestMapping(value = "/describedby" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) + public String getDescribedBy( + HttpServletRequest request, + HttpServletResponse response, + @PathVariable UUID uuid + ) throws SQLException, AuthorizeException, IOException, CrosswalkException { + Context context = ContextUtil.obtainContext(request); + String xwalkName = 
configurationService.getProperty("signposting.describedby.crosswalk-name"); + String responseMimeType = configurationService.getProperty("signposting.describedby.mime-type"); + response.addHeader("Content-Type", responseMimeType); + + DSpaceObject object = findObject(context, uuid); + DisseminationCrosswalk xwalk = (DisseminationCrosswalk) + pluginService.getNamedPlugin(DisseminationCrosswalk.class, xwalkName); + List elements = xwalk.disseminateList(context, object); + XMLOutputter outputter = new XMLOutputter(Format.getCompactFormat()); + return outputter.outputString(elements); + } + + private DSpaceObject findObject(Context context, UUID uuid) { + try { + DSpaceObject object = itemService.find(context, uuid); + if (isNull(object)) { + object = bitstreamService.find(context, uuid); + if (isNull(object)) { + throw new ResourceNotFoundException("No such resource: " + uuid); + } + } else { + verifyItemIsDiscoverable((Item) object); + } + return object; + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private static void verifyItemIsDiscoverable(Item item) { + if (!item.isDiscoverable()) { + String message = format("Item with uuid [%s] is not Discoverable", item.getID().toString()); + throw new AccessDeniedException(message); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java new file mode 100644 index 000000000000..90786b9dc426 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.converter; + +import java.util.List; + +import 
org.dspace.app.rest.converter.DSpaceConverter; +import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.springframework.stereotype.Component; + + +/** + * This is the converter from/to the Linkset in the DSpace API data model and the REST data model. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@Component +public class LinksetConverter implements DSpaceConverter, LinksetRest> { + + @Override + public LinksetRest convert(List linksets, Projection projection) { + LinksetRest linksetRest = new LinksetRest(); + linksetRest.setProjection(projection); + linksetRest.setLinkset(linksets); + return linksetRest; + } + + @Override + public Class> getModelClass() { + return (Class>) ((Class) List.class); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java new file mode 100644 index 000000000000..24c8e6735dc9 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.converter; + +import static java.lang.String.format; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang.StringUtils.isNotBlank; + +import java.util.List; + +import org.dspace.app.rest.signposting.model.LinksetNode; + +/** + * Converter for converting list of linkset nodes into application/linkset format. 
+ */ +public class LinksetRestMessageConverter { + + private LinksetRestMessageConverter() { + } + + /** + * Converts list of linkset nodes into string of application/linkset format. + * + * @param linksetNodes link of linkset nodes + * @return string of application/linkset format. + */ + public static String convert(List> linksetNodes) { + StringBuilder responseBody = new StringBuilder(); + linksetNodes.stream().flatMap(List::stream).forEach(linksetNode -> { + if (isNotBlank(linksetNode.getLink())) { + responseBody.append(format("<%s> ", linksetNode.getLink())); + } + if (nonNull(linksetNode.getRelation())) { + responseBody.append(format("; rel=\"%s\" ", linksetNode.getRelation().getName())); + } + if (isNotBlank(linksetNode.getType())) { + responseBody.append(format("; type=\"%s\" ", linksetNode.getType())); + } + if (isNotBlank(linksetNode.getAnchor())) { + responseBody.append(format("; anchor=\"%s\" ", linksetNode.getAnchor())); + } + responseBody.append(", "); + }); + return responseBody.toString(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java new file mode 100644 index 000000000000..8a0c2158d1ea --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.hateoas; + +import org.dspace.app.rest.model.hateoas.DSpaceResource; +import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.dspace.app.rest.utils.Utils; + +/** + * Linkset Rest HAL Resource. 
The HAL Resource wraps the REST Resource + * adding support for the links and embedded resources + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@RelNameDSpaceResource(LinksetRest.NAME) +public class LinksetResource extends DSpaceResource { + public LinksetResource(LinksetRest linkset, Utils utils) { + super(linkset, utils); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java new file mode 100644 index 000000000000..14d6f6581c7a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java @@ -0,0 +1,139 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DTO object represents a set of links. 
+ */ +public class Linkset { + + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List author; + @JsonProperty("cite-as") + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List citeAs; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List item; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List collection; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List type; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List license; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List linkset; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List describes; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List describedby; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String anchor; + + public List getAuthor() { + if (this.author == null) { + this.author = new ArrayList<>(); + } + return author; + } + public void setAuthor(List author) { + this.author = author; + } + + public List getCiteAs() { + if (this.citeAs == null) { + this.citeAs = new ArrayList<>(); + } + return citeAs; + } + public void setCiteAs(List citeAs) { + this.citeAs = citeAs; + } + + public List getItem() { + if (this.item == null) { + this.item = new ArrayList<>(); + } + return item; + } + public void setItem(List item) { + this.item = item; + } + + public List getCollection() { + if (this.collection == null) { + this.collection = new ArrayList<>(); + } + return collection; + } + public void setCollection(List collection) { + this.collection = collection; + } + + public List getType() { + if (type == null) { + type = new ArrayList<>(); + } + return type; + } + public void setType(List type) { + this.type = type; + } + + public List getLicense() { + if (license == null) { + license = new ArrayList<>(); + } + return license; + } + public void setLicense(List license) { + this.license = license; + } + + public List getLinkset() { + if (linkset == null) { + linkset = new ArrayList<>(); + } + return linkset; + } + public void 
setLinkset(List linkset) { + this.linkset = linkset; + } + + public List getDescribes() { + if (describes == null) { + describes = new ArrayList<>(); + } + return describes; + } + public void setDescribes(List describes) { + this.describes = describes; + } + + public List getDescribedby() { + if (describedby == null) { + describedby = new ArrayList<>(); + } + return describedby; + } + public void setDescribedby(List describedby) { + this.describedby = describedby; + } + + public String getAnchor() { + return anchor; + } + public void setAnchor(String anchor) { + this.anchor = anchor; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetNode.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetNode.java new file mode 100644 index 000000000000..8c7347350faa --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetNode.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonInclude; + +/** + * DTO object represents a node of a link set. 
+ */ +public class LinksetNode { + + @JsonInclude(JsonInclude.Include.NON_NULL) + private String link; + @JsonInclude(JsonInclude.Include.NON_NULL) + private LinksetRelationType relation; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String type; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String anchor; + + public LinksetNode(String link, LinksetRelationType relation, String type, String anchor) { + this(link, relation, anchor); + this.type = type; + } + + public LinksetNode(String link, LinksetRelationType relation, String anchor) { + this.link = link; + this.relation = relation; + this.anchor = anchor; + } + + public String getLink() { + return link; + } + + public void setLink(String link) { + this.link = link; + } + + public LinksetRelationType getRelation() { + return relation; + } + + public void setRelation(LinksetRelationType relation) { + this.relation = relation; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getAnchor() { + return anchor; + } + + public void setAnchor(String anchor) { + this.anchor = anchor; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelation.java new file mode 100644 index 000000000000..ecbb786079d0 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelation.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonInclude; + +/** + * DTO object represents a relation to specific resource. 
+ */ +public class LinksetRelation { + + @JsonInclude(JsonInclude.Include.NON_NULL) + private String href; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String type; + + public LinksetRelation(String href, String type) { + this.href = href; + this.type = type; + } + + public String getHref() { + return href; + } + + public String getType() { + return type; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java new file mode 100644 index 000000000000..285bf5a56ee1 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * An enumeration that holds track of linkset relation types. 
+ */ +public enum LinksetRelationType { + + ITEM("item"), + CITE_AS("cite-as"), + AUTHOR("author"), + TYPE("type"), + LICENSE("license"), + COLLECTION("collection"), + LINKSET("linkset"), + DESCRIBES("describes"), + DESCRIBED_BY("describedby"); + + private final String name; + + LinksetRelationType(String name) { + this.name = name; + } + + @JsonValue + public String getName() { + return name; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java new file mode 100644 index 000000000000..df80cd5c2d50 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import org.dspace.app.rest.RestResourceController; +import org.dspace.app.rest.model.LinksRest; +import org.dspace.app.rest.model.RestAddressableModel; + +/** + * The REST object for the Linkset objects. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@LinksRest +public class LinksetRest extends RestAddressableModel { + public static final String NAME = "linkset"; + public static final String PLURAL_NAME = "linksets"; + public static final String CATEGORY = RestAddressableModel.CORE; + + public static final String JSON = "json"; + + @JsonInclude(Include.NON_EMPTY) + private List linkset; + + public List getLinkset() { + if (this.linkset == null) { + this.linkset = new ArrayList<>(); + } + return linkset; + } + public void setLinkset(List linkset) { + this.linkset = linkset; + } + + @JsonIgnore + @Override + public String getType() { + return NAME; + } + + @Override + public String getCategory() { + return CATEGORY; + } + + @Override + public Class getController() { + return RestResourceController.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java new file mode 100644 index 000000000000..c49b32834686 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java @@ -0,0 +1,52 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +/** + * Represents metadata handle configuration. 
+ */ +public class MetadataConfiguration { + + private String metadataField; + + private String pattern; + + private String mimeType; + + public MetadataConfiguration() { + } + + public MetadataConfiguration(String metadataField, String pattern) { + this.metadataField = metadataField; + this.pattern = pattern; + } + + public String getMetadataField() { + return metadataField; + } + + public void setMetadataField(String metadataField) { + this.metadataField = metadataField; + } + + public String getPattern() { + return pattern; + } + + public void setPattern(String pattern) { + this.pattern = pattern; + } + + public String getMimeType() { + return mimeType; + } + + public void setMimeType(String mimeType) { + this.mimeType = mimeType; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java new file mode 100644 index 000000000000..3ba09bf1094c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonInclude; +import org.dspace.app.rest.RestResourceController; +import org.dspace.app.rest.model.LinksRest; +import org.dspace.app.rest.model.RestAddressableModel; + +/** + * The REST object represents Typed Link. 
+ */ +@LinksRest +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class TypedLinkRest extends RestAddressableModel { + public static final String NAME = "linkset"; + public static final String PLURAL_NAME = "linksets"; + public static final String CATEGORY = RestAddressableModel.CORE; + + private String href; + + private LinksetRelationType rel; + + private String type; + + public TypedLinkRest() { + } + + public TypedLinkRest(String href, LinksetRelationType rel, String type) { + this.href = href; + this.rel = rel; + this.type = type; + } + + public String getHref() { + return href; + } + + public void setHref(String href) { + this.href = href; + } + + public LinksetRelationType getRel() { + return rel; + } + + public void setRel(LinksetRelationType rel) { + this.rel = rel; + } + + public void setType(String type) { + this.type = type; + } + + @Override + public String getType() { + return type; + } + + @Override + public String getCategory() { + return CATEGORY; + } + + @Override + public Class getController() { + return RestResourceController.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/AbstractSignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/AbstractSignPostingProcessor.java new file mode 100644 index 000000000000..32368a57d595 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/AbstractSignPostingProcessor.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor; + +import org.dspace.app.rest.signposting.model.LinksetRelationType; + +/** + * An abstract class of generic signposting relation. 
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public abstract class AbstractSignPostingProcessor { + + private String metadataField; + + private LinksetRelationType relation; + + private String pattern; + + public String getMetadataField() { + return metadataField; + } + + public void setMetadataField(String metadataField) { + this.metadataField = metadataField; + } + + public LinksetRelationType getRelation() { + return relation; + } + + public void setRelation(LinksetRelationType relation) { + this.relation = relation; + } + + public String getPattern() { + return pattern; + } + + public void setPattern(String pattern) { + this.pattern = pattern; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java new file mode 100644 index 000000000000..efcfd50ab512 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; + +/** + * SignPostingProcessor interface. 
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public interface SignPostingProcessor { + + /** + * Method for adding new linkset nodes into {@code linksetNodes}. + * + * @param context context + * @param request request + * @param object object + * @param linksetNodes linkset nodes + */ + void addLinkSetNodes(Context context, HttpServletRequest request, + T object, List linksetNodes); +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java new file mode 100644 index 000000000000..c65191cb0749 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link BitstreamSignpostingProcessor} for the linkset relation. 
+ */ +public class BitstreamLinksetProcessor extends BitstreamSignpostingProcessor { + + private static final Logger log = Logger.getLogger(BitstreamLinksetProcessor.class); + + private final BitstreamService bitstreamService; + + private final ConfigurationService configurationService; + + public BitstreamLinksetProcessor(FrontendUrlService frontendUrlService, + BitstreamService bitstreamService, + ConfigurationService configurationService) { + super(frontendUrlService); + this.bitstreamService = bitstreamService; + this.configurationService = configurationService; + setRelation(LinksetRelationType.LINKSET); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Bitstream bitstream, List linksetNodes) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String signpostingPath = configurationService.getProperty("signposting.path"); + String baseUrl = configurationService.getProperty("dspace.ui.url"); + + String linksetUrl = String.format("%s/%s/linksets/%s", baseUrl, signpostingPath, item.getID()); + String linksetJsonUrl = linksetUrl + "/json"; + List links = List.of( + new LinksetNode(linksetUrl, getRelation(), "application/linkset", buildAnchor(bitstream)), + new LinksetNode(linksetJsonUrl, getRelation(), "application/linkset+json", + buildAnchor(bitstream)) + ); + linksetNodes.addAll(links); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java new file mode 100644 index 000000000000..815d7817d4cf --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are 
subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link BitstreamSignpostingProcessor} for the collection relation. + * It links the Bitstream to the parent Item. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class BitstreamParentItemProcessor extends BitstreamSignpostingProcessor { + + private static final Logger log = Logger.getLogger(BitstreamParentItemProcessor.class); + + private final BitstreamService bitstreamService; + + public BitstreamParentItemProcessor(FrontendUrlService frontendUrlService, + BitstreamService bitstreamService) { + super(frontendUrlService); + this.bitstreamService = bitstreamService; + setRelation(LinksetRelationType.COLLECTION); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Bitstream bitstream, List linksetNodes) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String itemUiUrl = frontendUrlService.generateUrl(context, item); + linksetNodes.add(new LinksetNode(itemUiUrl, getRelation(), "text/html", buildAnchor(bitstream))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java new file mode 100644 index 000000000000..b0f251edb5ee --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import org.dspace.app.rest.signposting.processor.AbstractSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.SignPostingProcessor; +import org.dspace.content.Bitstream; +import org.dspace.util.FrontendUrlService; + +/** + * An abstract class represents {@link SignPostingProcessor } for a bitstream. 
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public abstract class BitstreamSignpostingProcessor extends AbstractSignPostingProcessor + implements SignPostingProcessor { + + protected final FrontendUrlService frontendUrlService; + + public BitstreamSignpostingProcessor(FrontendUrlService frontendUrlService) { + this.frontendUrlService = frontendUrlService; + } + + public String buildAnchor(Bitstream bitstream) { + return frontendUrlService.generateUrl(bitstream); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java new file mode 100644 index 000000000000..005a8009836d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.dspace.util.SimpleMapConverter; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An extension of {@link BitstreamSignpostingProcessor} for the 
type relation. + * Provides links to a specific type from schema.org. + */ +public class BitstreamTypeProcessor extends BitstreamSignpostingProcessor { + + private static final Logger log = Logger.getLogger(BitstreamTypeProcessor.class); + + @Autowired + private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; + + @Autowired + private BitstreamService bitstreamService; + + public BitstreamTypeProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.TYPE); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Bitstream bitstream, List linksetNodes) { + try { + String type = bitstreamService.getMetadataFirstValue(bitstream, "dc", "type", null, Item.ANY); + if (StringUtils.isNotBlank(type)) { + String typeSchemeUri = mapConverterDSpaceToSchemaOrgUri.getValue(type); + linksetNodes.add(new LinksetNode(typeSchemeUri, getRelation(), buildAnchor(bitstream))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java new file mode 100644 index 000000000000..1bb215c46864 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import static java.util.Objects.nonNull; +import static org.apache.commons.lang.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.content.Item.ANY; + +import java.text.MessageFormat; 
+import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the author relation. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemAuthorProcessor extends ItemSignpostingProcessor { + + /** + * log4j category + */ + private static final Logger log = Logger.getLogger(ItemAuthorProcessor.class); + + private final ItemService itemService; + + private String orcidMetadata; + + public ItemAuthorProcessor(FrontendUrlService frontendUrlService, + ItemService itemService) { + super(frontendUrlService); + this.itemService = itemService; + setRelation(LinksetRelationType.AUTHOR); + } + + public String getOrcidMetadata() { + return orcidMetadata; + } + + public void setOrcidMetadata(String orcidMetadata) { + this.orcidMetadata = orcidMetadata; + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + String authorId = itemService.getMetadataFirstValue(item, MetadataSchemaEnum.RELATION.getName(), + "isAuthorOfPublication", null, ANY); + if (isNotBlank(authorId)) { + Item author = itemService.findByIdOrLegacyId(context, authorId); + if (nonNull(author)) { + String authorOrcid = itemService.getMetadataFirstValue( + author, new MetadataFieldName(getOrcidMetadata()), ANY + ); + if (isNotBlank(authorOrcid)) { + String authorLink = isBlank(getPattern()) + ? 
authorOrcid + : MessageFormat.format(getPattern(), authorOrcid); + linksetNodes.add(new LinksetNode(authorLink, getRelation(), buildAnchor(context, item))); + } + } + } + } catch (Exception e) { + log.error("Problem to add signposting pattern", e); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java new file mode 100644 index 000000000000..61bf371adbdf --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.sql.SQLException; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the item relation. + * It links item with its content. 
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemContentBitstreamsProcessor extends ItemSignpostingProcessor { + + /** + * log4j category + */ + private static final Logger log = Logger.getLogger(ItemContentBitstreamsProcessor.class); + + public ItemContentBitstreamsProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.ITEM); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + for (Bundle bundle : item.getBundles(Constants.CONTENT_BUNDLE_NAME)) { + for (Bitstream bitstream : bundle.getBitstreams()) { + String mimeType = bitstream.getFormat(context).getMIMEType(); + String bitstreamUrl = frontendUrlService.generateUrl(bitstream); + linksetNodes.add( + new LinksetNode(bitstreamUrl, getRelation(), mimeType, buildAnchor(context, item)) + ); + } + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java new file mode 100644 index 000000000000..a16770c4d103 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import 
org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the describedby relation. + */ +public class ItemDescribedbyProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemDescribedbyProcessor.class); + + private final ConfigurationService configurationService; + + public ItemDescribedbyProcessor(FrontendUrlService frontendUrlService, ConfigurationService configurationService) { + super(frontendUrlService); + this.configurationService = configurationService; + setRelation(LinksetRelationType.DESCRIBED_BY); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + String signpostingPath = configurationService.getProperty("signposting.path"); + String baseUrl = configurationService.getProperty("dspace.ui.url"); + String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String describedByUrl = baseUrl + "/" + signpostingPath + "/describedby/" + item.getID(); + LinksetNode node = new LinksetNode(describedByUrl, getRelation(), mimeType, buildAnchor(context, item)); + linksetNodes.add(node); + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java new file mode 100644 index 000000000000..c5ebe958d97d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are 
subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import static java.util.Objects.nonNull; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the identifier relation. + * Identifier metadata can be specified with metadataField in configuration. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemIdentifierProcessor extends ItemSignpostingProcessor { + + private final ItemService itemService; + + public ItemIdentifierProcessor(FrontendUrlService frontendUrlService, ItemService itemService) { + super(frontendUrlService); + this.itemService = itemService; + setRelation(LinksetRelationType.CITE_AS); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + String identifier = itemService + .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); + if (nonNull(identifier)) { + if (isNotBlank(getPattern())) { + identifier = MessageFormat.format(getPattern(), identifier); + } + linksetNodes.add(new LinksetNode(identifier, getRelation(), buildAnchor(context, item))); + } + } +} diff --git
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java new file mode 100644 index 000000000000..1a26fa7695b1 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.license.factory.LicenseServiceFactory; +import org.dspace.license.service.CreativeCommonsService; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the license relation. 
+ */ +public class ItemLicenseProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemLicenseProcessor.class); + + private final CreativeCommonsService creativeCommonsService = + LicenseServiceFactory.getInstance().getCreativeCommonsService(); + + public ItemLicenseProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.LICENSE); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + String licenseUrl = creativeCommonsService.getLicenseURL(context, item); + if (StringUtils.isNotBlank(licenseUrl)) { + linksetNodes.add(new LinksetNode(licenseUrl, getRelation(), buildAnchor(context, item))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java new file mode 100644 index 000000000000..9008a28e29a6 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link 
ItemSignpostingProcessor} for the linkset relation. + */ +public class ItemLinksetProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemLinksetProcessor.class); + + private final ConfigurationService configurationService; + + public ItemLinksetProcessor(FrontendUrlService frontendUrlService, + ConfigurationService configurationService) { + super(frontendUrlService); + this.configurationService = configurationService; + setRelation(LinksetRelationType.LINKSET); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + if (item != null) { + String signpostingPath = configurationService.getProperty("signposting.path"); + String baseUrl = configurationService.getProperty("dspace.ui.url"); + + String linksetUrl = String.format("%s/%s/linksets/%s", baseUrl, signpostingPath, item.getID()); + String linksetJsonUrl = linksetUrl + "/json"; + String anchor = buildAnchor(context, item); + List links = List.of( + new LinksetNode(linksetUrl, getRelation(), "application/linkset", anchor), + new LinksetNode(linksetJsonUrl, getRelation(), "application/linkset+json", anchor) + ); + linksetNodes.addAll(links); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java new file mode 100644 index 000000000000..2ec26632a7e0 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.app.rest.signposting.processor.item; + +import org.dspace.app.rest.signposting.processor.AbstractSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.SignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An abstract class represents {@link SignPostingProcessor } for an item. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public abstract class ItemSignpostingProcessor extends AbstractSignPostingProcessor + implements SignPostingProcessor { + + protected final FrontendUrlService frontendUrlService; + + public ItemSignpostingProcessor(FrontendUrlService frontendUrlService) { + this.frontendUrlService = frontendUrlService; + } + + public String buildAnchor(Context context, Item item) { + return frontendUrlService.generateUrl(context, item); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java new file mode 100644 index 000000000000..ddd2da12d59a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import 
org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.dspace.util.SimpleMapConverter; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An extension of {@link ItemSignpostingProcessor} for the type relation. + * Provides links to a specific type from schema.org. + */ +public class ItemTypeProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemTypeProcessor.class); + private static final String ABOUT_PAGE_URI = "https://schema.org/AboutPage"; + + @Autowired + private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; + + @Autowired + private ItemService itemService; + + public ItemTypeProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.TYPE); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + linksetNodes.add(new LinksetNode(ABOUT_PAGE_URI, getRelation(), buildAnchor(context, item))); + String type = itemService.getMetadataFirstValue(item, "dc", "type", null, Item.ANY); + if (StringUtils.isNotBlank(type)) { + String typeSchemeUri = mapConverterDSpaceToSchemaOrgUri.getValue(type); + linksetNodes.add( + new LinksetNode(typeSchemeUri, getRelation(), buildAnchor(context, item)) + ); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java new file mode 100644 index 000000000000..baae16b88389 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java @@ -0,0 +1,43 @@ +/** 
+ * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.metadata; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An extension of {@link MetadataSignpostingProcessor} for the 'describes' relation. + */ +public class MetadataDescribesSignpostingProcessor extends MetadataSignpostingProcessor { + + @Autowired + private FrontendUrlService frontendUrlService; + + public MetadataDescribesSignpostingProcessor() { + setRelation(LinksetRelationType.DESCRIBES); + } + + @Override + public void addLinkSetNodes( + Context context, + HttpServletRequest request, + Item item, + List linksetNodes + ) { + String itemUrl = frontendUrlService.generateUrl(context, item); + String anchor = buildAnchor(item); + linksetNodes.add(new LinksetNode(itemUrl, getRelation(), "text/html", anchor)); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java new file mode 100644 index 000000000000..7b4e9135f1a8 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.metadata; + +import org.dspace.app.rest.signposting.processor.AbstractSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.SignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * An abstract class represents {@link SignPostingProcessor } for a metadata. + */ +public abstract class MetadataSignpostingProcessor extends AbstractSignPostingProcessor + implements SignPostingProcessor { + + private final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + + public String buildAnchor(Item item) { + String baseUrl = configurationService.getProperty("dspace.ui.url"); + String signpostingPath = configurationService.getProperty("signposting.path"); + return baseUrl + "/" + signpostingPath + "/describedby/" + item.getID(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java new file mode 100644 index 000000000000..33d0c10b7415 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.service; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Service for work with linksets. 
+ */ +public interface LinksetService { + + /** + * Returns list of linkset nodes for multiple linksets. + * + * @param request request + * @param context context + * @param item item + * @return two-dimensional list representing a list of lists where each list represents the linkset nodes. + */ + List> createLinksetNodesForMultipleLinksets( + HttpServletRequest request, + Context context, + Item item + ); + + /** + * Returns list of linkset nodes for single linkset. + * + * @param request request + * @param context context + * @param object dspace object + * @return two-dimensional list representing a list of lists where each list represents the linkset nodes. + */ + List createLinksetNodesForSingleLinkset( + HttpServletRequest request, + Context context, + DSpaceObject object + ); +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java new file mode 100644 index 000000000000..399b7bd1e6b0 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java @@ -0,0 +1,153 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.service.impl; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.security.BitstreamMetadataReadPermissionEvaluatorPlugin; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.processor.bitstream.BitstreamSignpostingProcessor; +import 
org.dspace.app.rest.signposting.processor.item.ItemSignpostingProcessor; +import org.dspace.app.rest.signposting.processor.metadata.MetadataSignpostingProcessor; +import org.dspace.app.rest.signposting.service.LinksetService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.utils.DSpace; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * Default implementation of {@link LinksetService}. + */ +@Service +public class LinksetServiceImpl implements LinksetService { + + private static final Logger log = Logger.getLogger(LinksetServiceImpl.class); + + @Autowired + protected ItemService itemService; + + @Autowired + private BitstreamMetadataReadPermissionEvaluatorPlugin bitstreamMetadataReadPermissionEvaluatorPlugin; + + private final List bitstreamProcessors = new DSpace().getServiceManager() + .getServicesByType(BitstreamSignpostingProcessor.class); + + private final List itemProcessors = new DSpace().getServiceManager() + .getServicesByType(ItemSignpostingProcessor.class); + + private final List metadataProcessors = new DSpace().getServiceManager() + .getServicesByType(MetadataSignpostingProcessor.class); + + @Override + public List> createLinksetNodesForMultipleLinksets( + HttpServletRequest request, + Context context, + Item item + ) { + ArrayList> linksets = new ArrayList<>(); + addItemLinksets(request, context, item, linksets); + addBitstreamLinksets(request, context, item, linksets); + addMetadataLinksets(request, context, item, linksets); + return linksets; + } + + @Override + public List createLinksetNodesForSingleLinkset( + HttpServletRequest request, + Context context, + DSpaceObject object + ) { + List linksetNodes = new ArrayList<>(); + if (object.getType() 
== Constants.ITEM) { + for (ItemSignpostingProcessor processor : itemProcessors) { + processor.addLinkSetNodes(context, request, (Item) object, linksetNodes); + } + } else if (object.getType() == Constants.BITSTREAM) { + for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { + processor.addLinkSetNodes(context, request, (Bitstream) object, linksetNodes); + } + } + return linksetNodes; + } + + private void addItemLinksets( + HttpServletRequest request, + Context context, + Item item, + List> linksets + ) { + List linksetNodes = new ArrayList<>(); + if (item.getType() == Constants.ITEM) { + for (ItemSignpostingProcessor sp : itemProcessors) { + sp.addLinkSetNodes(context, request, item, linksetNodes); + } + } + linksets.add(linksetNodes); + } + + private void addBitstreamLinksets( + HttpServletRequest request, + Context context, + Item item, + ArrayList> linksets + ) { + Iterator bitstreamsIterator = getItemBitstreams(context, item); + bitstreamsIterator.forEachRemaining(bitstream -> { + try { + boolean isAuthorized = bitstreamMetadataReadPermissionEvaluatorPlugin + .metadataReadPermissionOnBitstream(context, bitstream); + if (isAuthorized) { + List bitstreamLinkset = new ArrayList<>(); + for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { + processor.addLinkSetNodes(context, request, bitstream, bitstreamLinkset); + } + if (!bitstreamLinkset.isEmpty()) { + linksets.add(bitstreamLinkset); + } + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + }); + } + + private void addMetadataLinksets( + HttpServletRequest request, + Context context, + Item item, + ArrayList> linksets + ) { + for (MetadataSignpostingProcessor processor : metadataProcessors) { + List metadataLinkset = new ArrayList<>(); + processor.addLinkSetNodes(context, request, item, metadataLinkset); + if (!metadataLinkset.isEmpty()) { + linksets.add(metadataLinkset); + } + } + } + + private Iterator getItemBitstreams(Context context, Item item) { + try { + 
List bundles = itemService.getBundles(item, Constants.DEFAULT_BUNDLE_NAME); + return bundles.stream().flatMap(bundle -> bundle.getBitstreams().stream()).iterator(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java new file mode 100644 index 000000000000..5da05bc44059 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.utils; + +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelation; +import org.dspace.app.rest.signposting.model.LinksetRelationType; + +/** + * Class for mapping {@link Linkset} objects. + */ +public class LinksetMapper { + + private LinksetMapper() { + } + + /** + * Converts list of linkset nodes into linkset. 
+ * + * @param linksetNodes list of linkset nodes + * @return linkset + */ + public static Linkset map(List linksetNodes) { + Linkset linkset = new Linkset(); + linkset.setLinkset(getLinksetRelationsByType(linksetNodes, LinksetRelationType.LINKSET)); + linkset.setAuthor(getLinksetRelationsByType(linksetNodes, LinksetRelationType.AUTHOR)); + linkset.setItem(getLinksetRelationsByType(linksetNodes, LinksetRelationType.ITEM)); + linkset.setType(getLinksetRelationsByType(linksetNodes, LinksetRelationType.TYPE)); + linkset.setCollection(getLinksetRelationsByType(linksetNodes, LinksetRelationType.COLLECTION)); + linkset.setLicense(getLinksetRelationsByType(linksetNodes, LinksetRelationType.LICENSE)); + linkset.setCiteAs(getLinksetRelationsByType(linksetNodes, LinksetRelationType.CITE_AS)); + linkset.setDescribes(getLinksetRelationsByType(linksetNodes, LinksetRelationType.DESCRIBES)); + linkset.setDescribedby(getLinksetRelationsByType(linksetNodes, LinksetRelationType.DESCRIBED_BY)); + if (!linksetNodes.isEmpty()) { + linkset.setAnchor(linksetNodes.get(0).getAnchor()); + } + return linkset; + } + + private static List getLinksetRelationsByType(List linkset, + LinksetRelationType type) { + return linkset.stream() + .filter(linksetNode -> type.equals(linksetNode.getRelation())) + .map(linksetNode -> new LinksetRelation(linksetNode.getLink(), linksetNode.getType())) + .collect(Collectors.toList()); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractTopSolrStatsFieldGenerator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractTopSolrStatsFieldGenerator.java index 9d04803c71ea..6d8242ca9fb1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractTopSolrStatsFieldGenerator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractTopSolrStatsFieldGenerator.java @@ -66,8 +66,8 @@ Dataset getTypeStatsDataset(Context context, DSpaceObject dso, String 
typeAxisSt } else { hasValidRelation = true; - query = statisticsDatasetDisplay - .composeQueryWithInverseRelation(dso, discoveryConfiguration.getDefaultFilterQueries()); + query = statisticsDatasetDisplay.composeQueryWithInverseRelation( + dso, discoveryConfiguration.getDefaultFilterQueries(), getDsoType(dso)); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractUsageReportGenerator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractUsageReportGenerator.java index 940773547da4..23c8f99e5857 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractUsageReportGenerator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/AbstractUsageReportGenerator.java @@ -40,5 +40,6 @@ public String getRelation() { public void setRelation(String relation) { this.relation = relation; } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/StatisticsReportsConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/StatisticsReportsConfiguration.java index 3e366f7cc9de..8fef2b35853a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/StatisticsReportsConfiguration.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/StatisticsReportsConfiguration.java @@ -11,6 +11,7 @@ import java.util.Map; import java.util.Optional; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.model.UsageReportCategoryRest; import org.dspace.content.Bitstream; import org.dspace.content.Collection; @@ -40,6 +41,13 @@ public List getCategories(DSpaceObject dso) { } else if (dso instanceof Community) { return mapping.get("community"); } else if (dso instanceof Collection) { + String entityType = getEntityType(dso); + if (StringUtils.isNotEmpty(entityType)) { + List result = mapping.get("collection-" + entityType); + if (result != null) { + return result; + } + } 
return mapping.get("collection"); } else if (dso instanceof Item) { Item item = (Item) dso; @@ -59,6 +67,16 @@ public List getCategories(DSpaceObject dso) { return null; } + private String getEntityType(DSpaceObject dso) { + return dso.getMetadata() + .stream() + .filter(metadataValue -> + "dspace.entity.type".equals(metadataValue.getMetadataField().toString('.'))) + .map(MetadataValue::getValue) + .findFirst() + .orElse(""); + } + public UsageReportGenerator getReportGenerator(DSpaceObject dso, String reportId) { List categories = getCategories(dso); Optional cat = categories.stream().filter(x -> x.getReports().containsKey(reportId)) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopCategoriesGenerator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopCategoriesGenerator.java index 58532e46bf0f..fbba8f902ee1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopCategoriesGenerator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopCategoriesGenerator.java @@ -10,6 +10,7 @@ import static org.dspace.core.Constants.ITEM; import java.io.IOException; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -24,6 +25,7 @@ import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.services.ConfigurationService; import org.dspace.statistics.content.StatisticsDatasetDisplay; import org.dspace.statistics.service.SolrLoggerService; import org.springframework.beans.factory.annotation.Autowired; @@ -40,6 +42,9 @@ public class TopCategoriesGenerator extends AbstractUsageReportGenerator { @Autowired private SolrLoggerService solrLoggerService; + @Autowired + private ConfigurationService configurationService; + @Autowired private DiscoveryConfigurationService discoveryConfigurationService; @@ 
-67,8 +72,8 @@ private Map getCategoriesCount(DSpaceObject dso, String startDa Map categoriesCount = new HashMap(); - for (String category : categoryQueries.keySet()) { - String categoryQuery = categoryQueries.get(category); + for (String category : getCategoryQueries().keySet()) { + String categoryQuery = getCategoryQueries().get(category); Integer categoryCount = getCategoryCount(dso, discoveryConfiguration, categoryQuery, startDate, endDate); categoriesCount.put(category, categoryCount); } @@ -93,7 +98,8 @@ private int getCategoryCount(DSpaceObject dso, DiscoveryConfiguration discoveryC private String composeCategoryQuery(DSpaceObject dso, DiscoveryConfiguration configuration, String categoryQuery) { List defaultFilterQueries = configuration.getDefaultFilterQueries(); - String query = new StatisticsDatasetDisplay().composeQueryWithInverseRelation(dso, defaultFilterQueries); + String query = new StatisticsDatasetDisplay().composeQueryWithInverseRelation(dso, + defaultFilterQueries, dso.getType()); if (categoryQuery.equals(OTHER_CATEGORY)) { return query + " AND " + getAllCategoryQueriesReverted(); @@ -104,7 +110,7 @@ private String composeCategoryQuery(DSpaceObject dso, DiscoveryConfiguration con } private String getAllCategoryQueriesReverted() { - return categoryQueries.values().stream() + return getCategoryQueries().values().stream() .filter(categoryQuery -> !OTHER_CATEGORY.equals(categoryQuery)) .map(categoryQuery -> "-" + formatCategoryQuery(categoryQuery)) .collect(Collectors.joining(" AND ")); @@ -129,10 +135,26 @@ public String getReportType() { } public Map getCategoryQueries() { + if (categoryQueries == null) { + return getDefaultCategoryQueries(); + } return categoryQueries; } public void setCategoryQueries(Map categoryQueries) { this.categoryQueries = categoryQueries; } + + private Map getDefaultCategoryQueries() { + return Arrays.stream(getDefaultEntityTypes()) + .collect(Collectors.toMap( + type -> type.toLowerCase(), + type -> "entityType_keyword: 
'" + type + "'" + )); + } + + private String[] getDefaultEntityTypes() { + return configurationService.getArrayProperty("cris.entity-type"); + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopItemsGenerator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopItemsGenerator.java index d0805c68de36..1e67b0cac66c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopItemsGenerator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TopItemsGenerator.java @@ -81,9 +81,8 @@ public UsageReportRest createUsageReport(Context context, DSpaceObject root, Str } else { hasValidRelation = true; - query = statisticsDatasetDisplay - .composeQueryWithInverseRelation(root, - discoveryConfiguration.getDefaultFilterQueries()); + query = statisticsDatasetDisplay.composeQueryWithInverseRelation(root, + discoveryConfiguration.getDefaultFilterQueries(), getDsoType()); } } if (!hasValidRelation) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalDownloadsAndVisitsGenerator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalDownloadsAndVisitsGenerator.java index 2a1d95e0c291..620333ce61b9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalDownloadsAndVisitsGenerator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalDownloadsAndVisitsGenerator.java @@ -59,9 +59,8 @@ public UsageReportRest createUsageReport(Context context, DSpaceObject dso, Stri } else { hasValidRelation = true; - query = statisticsDatasetDisplay. 
- composeQueryWithInverseRelation( - dso, discoveryConfiguration.getDefaultFilterQueries()); + query = statisticsDatasetDisplay.composeQueryWithInverseRelation(dso, + discoveryConfiguration.getDefaultFilterQueries(), dso.getType()); } } if (!hasValidRelation) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitGenerator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitGenerator.java index 98ee393b5cd3..e419ac660fd6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitGenerator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitGenerator.java @@ -118,7 +118,7 @@ Dataset getDSOStatsDataset(Context context, DSpaceObject dso, int dsoType, Strin } else { hasValidRelation = true; query = statisticsDatasetDisplay.composeQueryWithInverseRelation( - dso, discoveryConfiguration.getDefaultFilterQueries()); + dso, discoveryConfiguration.getDefaultFilterQueries(), dso.getType()); type_of_dso = dso.getType(); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitPerPeriodGenerator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitPerPeriodGenerator.java index 5f7f53898abe..465f1022372a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitPerPeriodGenerator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/statistics/TotalVisitPerPeriodGenerator.java @@ -97,7 +97,8 @@ public UsageReportRest createUsageReport(Context context, DSpaceObject dso, Stri statisticsDatasetDisplay .composeQueryWithInverseRelation( dso, - discoveryConfiguration.getDefaultFilterQueries() + discoveryConfiguration.getDefaultFilterQueries(), + getDsoType(dso) ) ); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/SubmissionService.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/SubmissionService.java index 4c475b889bf0..12e18e8a0be4 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/SubmissionService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/SubmissionService.java @@ -37,7 +37,6 @@ import org.dspace.app.rest.repository.WorkspaceItemRestRepository; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.util.SubmissionConfig; -import org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.app.util.SubmissionStepConfig; import org.dspace.authorize.AuthorizeException; @@ -61,6 +60,8 @@ import org.dspace.services.ConfigurationService; import org.dspace.services.RequestService; import org.dspace.services.model.Request; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.dspace.workflow.WorkflowException; import org.dspace.workflow.WorkflowItemService; import org.dspace.workflow.WorkflowService; @@ -110,10 +111,10 @@ public class SubmissionService { @Autowired private ResearcherProfileService researcherProfileService; - private SubmissionConfigReader submissionConfigReader; + private SubmissionConfigService submissionConfigService; public SubmissionService() throws SubmissionConfigReaderException { - submissionConfigReader = new SubmissionConfigReader(); + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); } /** @@ -359,7 +360,7 @@ public List uploadFileToInprogressSubmission(Context context, HttpSer context.turnOffAuthorisationSystem(); } SubmissionConfig submissionConfig = - submissionConfigReader.getSubmissionConfigByName(wsi.getSubmissionDefinition().getName()); + submissionConfigService.getSubmissionConfigByName(wsi.getSubmissionDefinition().getName()); List stepInstancesAndConfigs = new ArrayList(); // we need to run the 
preProcess of all the appropriate steps and move on to the // upload and postProcess step @@ -428,7 +429,7 @@ public List uploadFileToInprogressSubmission(Context context, HttpSer public void evaluatePatchToInprogressSubmission(Context context, HttpServletRequest request, InProgressSubmission source, AInprogressSubmissionRest wsi, String section, Operation op) { boolean sectionExist = false; - SubmissionConfig submissionConfig = submissionConfigReader + SubmissionConfig submissionConfig = submissionConfigService .getSubmissionConfigByName(wsi.getSubmissionDefinition().getName()); List stepInstancesAndConfigs = new ArrayList(); // we need to run the preProcess of all the appropriate steps and move on to the diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java index 60c98b9f1a72..8952ae16da56 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java @@ -6,9 +6,11 @@ * http://www.dspace.org/license/ */ package org.dspace.app.rest.submit.factory.impl; + import java.sql.SQLException; import java.text.ParseException; import java.util.ArrayList; +import java.util.Date; import java.util.List; import javax.servlet.http.HttpServletRequest; @@ -25,11 +27,12 @@ import org.dspace.services.ConfigurationService; import org.dspace.submit.model.AccessConditionConfiguration; import org.dspace.submit.model.AccessConditionConfigurationService; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; /** * Submission "add" operation to add custom resource policies. 
- * + * * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) */ public class AccessConditionAddPatchOperation extends AddPatchOperation { @@ -57,6 +60,18 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio String[] absolutePath = getAbsolutePath(path).split("/"); List accessConditions = parseAccessConditions(path, value, absolutePath); + // Clamp access condition dates to midnight UTC + for (AccessConditionDTO condition : accessConditions) { + Date date = condition.getStartDate(); + if (null != date) { + condition.setStartDate(TimeHelpers.toMidnightUTC(date)); + } + date = condition.getEndDate(); + if (null != date) { + condition.setEndDate(TimeHelpers.toMidnightUTC(date)); + } + } + verifyAccessConditions(context, configuration, accessConditions); if (absolutePath.length == 1) { @@ -73,7 +88,7 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio } private List parseAccessConditions(String path, Object value, String[] split) { - List accessConditions = new ArrayList(); + List accessConditions = new ArrayList<>(); if (split.length == 1) { accessConditions = evaluateArrayObject((LateObjectEvaluator) value); } else if (split.length == 2) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java index 0216628a6b87..d2529cbca303 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java @@ -6,6 +6,7 @@ * http://www.dspace.org/license/ */ package org.dspace.app.rest.submit.factory.impl; + import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; @@ -29,6 +30,7 @@ import 
org.dspace.submit.model.AccessConditionConfiguration; import org.dspace.submit.model.AccessConditionConfigurationService; import org.dspace.submit.model.AccessConditionOption; +import org.dspace.util.TimeHelpers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -106,7 +108,7 @@ private AccessConditionOption getOption(AccessConditionConfiguration configurati return null; } - private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplare) + private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplace) throws ParseException { AccessConditionDTO accessCondition = new AccessConditionDTO(); accessCondition.setName(rpToReplace.getRpName()); @@ -114,13 +116,13 @@ private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attribut accessCondition.setEndDate(rpToReplace.getEndDate()); switch (attributeReplace) { case "name": - accessCondition.setName(valueToReplare); + accessCondition.setName(valueToReplace); return accessCondition; case "startDate": - accessCondition.setStartDate(parseDate(valueToReplare)); + accessCondition.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); return accessCondition; case "endDate": - accessCondition.setEndDate(parseDate(valueToReplare)); + accessCondition.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); return accessCondition; default: throw new UnprocessableEntityException("The provided attribute: " @@ -128,17 +130,17 @@ private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attribut } } - private void updatePolicy(Context context, String valueToReplare, String attributeReplace, + private void updatePolicy(Context context, String valueToReplace, String attributeReplace, ResourcePolicy rpToReplace) throws SQLException, AuthorizeException { switch (attributeReplace) { case "name": - 
rpToReplace.setRpName(valueToReplare); + rpToReplace.setRpName(valueToReplace); break; case "startDate": - rpToReplace.setStartDate(parseDate(valueToReplare)); + rpToReplace.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); break; case "endDate": - rpToReplace.setEndDate(parseDate(valueToReplare)); + rpToReplace.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); break; default: throw new IllegalArgumentException("Attribute to replace is not valid:" + attributeReplace); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java index 4561a8a9c807..819bba0c1423 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java @@ -12,7 +12,7 @@ import org.dspace.app.deduplication.model.DuplicateDecisionObjectRest; import org.dspace.app.deduplication.model.DuplicateDecisionType; -import org.dspace.app.deduplication.utils.DedupUtils; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.patch.LateObjectEvaluator; import org.dspace.content.InProgressSubmission; @@ -43,7 +43,7 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio String.format("The specified path '%s' is not valid", getAbsolutePath(path))); } - DedupUtils dedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", DedupUtils.class); + IDedupUtils IDedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", IDedupUtils.class); DuplicateDecisionObjectRest decisionObject = evaluateSingleObject((LateObjectEvaluator) value); UUID currentItemID = 
source.getItem().getID(); @@ -98,7 +98,7 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio // generate UnprocessableEntityException if decisionObject is invalid try { - if (!dedupUtils.validateDecision(decisionObject)) { + if (!IDedupUtils.validateDecision(decisionObject)) { throw new UnprocessableEntityException( String.format("The specified decision %s is not valid", decisionObject.getValue())); } @@ -106,13 +106,13 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio throw new UnprocessableEntityException(String.format("The specified decision %s is not valid", subPath)); } - if (!dedupUtils.matchExist(context, currentItemID, duplicateItemID, resourceType, null, isInWorkflow)) { + if (!IDedupUtils.matchExist(context, currentItemID, duplicateItemID, resourceType, null, isInWorkflow)) { throw new UnprocessableEntityException( String.format("Cannot find any duplicate match related to Item %s", duplicateItemID)); } - dedupUtils.setDuplicateDecision(context, source.getItem().getID(), duplicateItemID, source.getItem().getType(), - decisionObject); + IDedupUtils.setDuplicateDecision(context, source.getItem().getID(), duplicateItemID, source.getItem().getType(), + decisionObject); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java index fa2dc320b87b..98e62da012b5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java @@ -29,8 +29,13 @@ import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.app.util.TypeBindUtils; import org.dspace.content.InProgressSubmission; import org.dspace.content.MetadataValue; +import 
org.dspace.content.RelationshipMetadataService; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; import org.dspace.core.Utils; import org.dspace.services.ConfigurationService; @@ -53,6 +58,13 @@ public class DescribeStep extends AbstractProcessingStep { private final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private final MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory + .getInstance() + .getMetadataAuthorityService(); + + private RelationshipMetadataService relationshipMetadataService = + ContentServiceFactory.getInstance().getRelationshipMetadataService(); + public DescribeStep() throws DCInputsReaderException { inputReader = DCInputsReaderFactory.getDCInputsReader(); } @@ -72,15 +84,11 @@ public DataDescribe getData(SubmissionService submissionService, InProgressSubmi private void readField(InProgressSubmission obj, SubmissionStepConfig config, DataDescribe data, DCInputSet inputConfig) throws DCInputsReaderException { - String documentTypeValue = ""; - List documentType = itemService.getMetadataByMetadataString(obj.getItem(), - configurationService.getProperty("submit.type-bind.field", "dc.type")); - if (documentType.size() > 0) { - documentTypeValue = documentType.get(0).getValue(); - } + + String documentType = TypeBindUtils.getTypeBindValue(obj); // Get list of all field names (including qualdrop names) allowed for this dc.type - List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentTypeValue); + List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentType); // Loop input rows and process submitted metadata for (DCInput[] row : inputConfig.getFields()) { @@ -98,7 +106,10 @@ private void readField(InProgressSubmission obj, 
SubmissionStepConfig config, Da .standardize(input.getSchema(), input.getElement(), input.getQualifier(), "-")); readField(obj, config, data, inputConfigChild); } else { - fieldsName.add(input.getFieldName()); + String fieldName = input.getFieldName(); + if (fieldName != null) { + fieldsName.add(fieldName); + } } @@ -207,4 +218,3 @@ private List getInputFieldsName(DCInputSet inputConfig, String configId) return fieldsName; } } - diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java index d7ad62153bcc..90f72afe7f07 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java @@ -15,8 +15,8 @@ import javax.servlet.http.HttpServletRequest; import org.dspace.app.deduplication.model.DuplicateDecisionType; -import org.dspace.app.deduplication.utils.DedupUtils; import org.dspace.app.deduplication.utils.DuplicateItemInfo; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.rest.converter.factory.ConverterServiceFactoryImpl; import org.dspace.app.rest.model.MetadataValueRest; import org.dspace.app.rest.model.patch.Operation; @@ -54,14 +54,14 @@ public class DetectPotentialDuplicateStep extends AbstractProcessingStep { public DataDetectDuplicate getData(SubmissionService submissionService, InProgressSubmission obj, SubmissionStepConfig config) throws Exception { - DedupUtils dedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", DedupUtils.class); + IDedupUtils IDedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", IDedupUtils.class); UUID itemID = obj.getItem().getID(); int typeID = obj.getItem().getType(); boolean check = !(obj instanceof WorkspaceItem); - List potentialDuplicates = 
dedupUtils.getDuplicateByIDandType(getContext(), itemID, typeID, - check); + List potentialDuplicates = IDedupUtils.getDuplicateByIDandType(getContext(), itemID, typeID, + check); Map matches = processPotentialDuplicates(itemID, check, potentialDuplicates); DataDetectDuplicate result = new DataDetectDuplicate(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java index 4b2dddd6b707..b548d186a9c3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java @@ -13,18 +13,32 @@ import org.springframework.context.annotation.Configuration; /** - * This class provides extra configuration for our Spring Boot Application + * This class provides extra configuration for our Spring Boot Application. *

      - * NOTE: @ComponentScan on "org.dspace.app.configuration" provides a way for other DSpace modules or plugins - * to "inject" their own Spring configurations / subpaths into our Spring Boot webapp. + * NOTE: @ComponentScan on "org.dspace.app.configuration" provides a way for + * other DSpace modules or plugins to "inject" their own Spring configurations / + * subpaths into our Spring Boot webapp. * * @author Andrea Bollini (andrea.bollini at 4science.it) * @author Tim Donohue */ @Configuration -@ComponentScan( {"org.dspace.app.rest.converter", "org.dspace.app.rest.repository", "org.dspace.app.rest.utils", - "org.dspace.app.configuration", "org.dspace.iiif", "org.dspace.app.iiif", "org.dspace.app.rest.link", - "org.dspace.app.rest.converter.factory" }) +// Component scanning ignores any parent {@code ApplicationContext}s, so any +// bean which is in the scope of both will be duplicated. dspace-services makes +// its context the parent of this one. If a bean is explicitly configured in +// the parent, it won't be so configured in this context and you may have +// trouble. Be careful what you add here. 
+@ComponentScan( { + "org.dspace.app.rest.converter", + "org.dspace.app.rest.repository", + "org.dspace.app.rest.utils", + "org.dspace.app.configuration", + "org.dspace.iiif", + "org.dspace.app.iiif", + "org.dspace.app.rest.link", + "org.dspace.app.rest.converter.factory", + "org.dspace.app.scheduler" +}) public class ApplicationConfig { // Allowed CORS origins ("Access-Control-Allow-Origin" header) // Can be overridden in DSpace configuration @@ -36,6 +50,16 @@ public class ApplicationConfig { @Value("${iiif.cors.allowed-origins}") private String[] iiifCorsAllowedOrigins; + // Allowed IIIF CORS origins ("Access-Control-Allow-Origin" header) + // Can be overridden in DSpace configuration + @Value("${rest.cors.bitstream-allowed-origins}") + private String[] bitstreamCorsAllowedOrigins; + + // Allowed Signposting CORS origins ("Access-Control-Allow-Origin" header) + // Can be overridden in DSpace configuration + @Value("${signposting.cors.allowed-origins}") + private String[] signpostingCorsAllowedOrigins; + // Whether to allow credentials (cookies) in CORS requests ("Access-Control-Allow-Credentials" header) // Defaults to true. Can be overridden in DSpace configuration @Value("${rest.cors.allow-credentials:true}") @@ -46,6 +70,16 @@ public class ApplicationConfig { @Value("${iiif.cors.allow-credentials:true}") private boolean iiifCorsAllowCredentials; + // Whether to allow credentials (cookies) in CORS requests ("Access-Control-Allow-Credentials" header) + // Defaults to true. Can be overridden in DSpace configuration + @Value("${rest.cors.bitstream-allow-credentials:true}") + private boolean bitstreamsCorsAllowCredentials; + + // Whether to allow credentials (cookies) in CORS requests ("Access-Control-Allow-Credentials" header) + // Defaults to true. 
Can be overridden in DSpace configuration + @Value("${signposting.cors.allow-credentials:true}") + private boolean signpostingCorsAllowCredentials; + // Configured User Interface URL (default: http://localhost:4000) @Value("${dspace.ui.url:http://localhost:4000}") private String uiURL; @@ -91,6 +125,22 @@ public String[] getIiifAllowedOriginsConfig() { return this.iiifCorsAllowedOrigins; } + /** + * Returns the bitstream.cors.allowed-origins (for Bitstream access) defined in DSpace configuration. + * @return allowed origins + */ + public String[] getBitstreamAllowedOriginsConfig() { + return this.bitstreamCorsAllowedOrigins; + } + + /** + * Returns the signposting.cors.allowed-origins (for Signposting access) defined in DSpace configuration. + * @return allowed origins + */ + public String[] getSignpostingAllowedOriginsConfig() { + return this.signpostingCorsAllowedOrigins; + } + /** * Return whether to allow credentials (cookies) on CORS requests. This is used to set the * CORS "Access-Control-Allow-Credentials" header in Application class. @@ -108,4 +158,22 @@ public boolean getCorsAllowCredentials() { public boolean getIiifAllowCredentials() { return iiifCorsAllowCredentials; } + + /** + * Return whether to allow credentials (cookies) on IIIF requests. This is used to set the + * CORS "Access-Control-Allow-Credentials" header in Application class. Defaults to false. + * @return true or false + */ + public boolean getBitstreamsAllowCredentials() { + return bitstreamsCorsAllowCredentials; + } + + /** + * Return whether to allow credentials (cookies) on Signposting requests. This is used to set the + * CORS "Access-Control-Allow-Credentials" header in Application class. Defaults to false. 
+ * @return true or false + */ + public boolean getSignpostingAllowCredentials() { + return signpostingCorsAllowCredentials; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/UsageReportUtils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/UsageReportUtils.java index 90f7ec252542..47dbcef1749d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/UsageReportUtils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/UsageReportUtils.java @@ -101,6 +101,12 @@ public class UsageReportUtils { public static final String TOP_DOWNLOAD_COUNTRIES_REPORT_ID = "TopDownloadsCountries"; public static final String TOP_DOWNLOAD_CITIES_REPORT_ID = "TopDownloadsCities"; public static final String TOTAL_DOWNLOAD_PER_MONTH_REPORT_ID = "TotalDownloadsPerMonth"; + public static final String TOP_ITEMS_CITIES_REPORT_ID = "TopItemsCities"; + public static final String TOP_ITEMS_CONTINENTS_REPORT_ID = "TopItemsContinents"; + public static final String TOP_ITEMS_COUNTRIES_REPORT_ID = "TopItemsCountries"; + public static final String TOP_ITEMS_CATEGORIES_REPORT_ID = "TopItemsCategories"; + public static final String TOTAL_ITEMS_VISITS_REPORT_ID = "TotalItemsVisits"; + public static final String TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID = "TotalItemsVisitsPerMonth"; /** * Get list of usage reports that are applicable to the DSO (of given UUID) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java index 8fbd6451185e..fb148b8c4eb9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java @@ -153,23 +153,50 @@ public class Utils { public Page getPage(List fullContents, @Nullable Pageable optionalPageable) { Pageable pageable = getPageable(optionalPageable); int total = fullContents.size(); - List 
pageContent = null; - if (pageable == null) { - pageable = PageRequest.of(0, DEFAULT_PAGE_SIZE); - } if (pageable.getOffset() > total) { throw new PaginationException(total); } else { - if (pageable.getOffset() + pageable.getPageSize() > total) { - pageContent = fullContents.subList(Math.toIntExact(pageable.getOffset()), total); - } else { - pageContent = fullContents.subList(Math.toIntExact(pageable.getOffset()), - Math.toIntExact(pageable.getOffset()) + pageable.getPageSize()); - } + List pageContent = getListSlice(fullContents, pageable); return new PageImpl<>(pageContent, pageable, total); } } + /** + * Returns list of objects for the current page. + * @param fullList the complete list of objects + * @param optionalPageable + * @return list of page objects + * @param + */ + public List getPageObjectList(List fullList, @Nullable Pageable optionalPageable) { + Pageable pageable = getPageable(optionalPageable); + int total = fullList.size(); + if (pageable.getOffset() > total) { + throw new PaginationException(total); + } else { + return getListSlice(fullList, pageable); + } + } + + /** + * Returns the list elements required for the page + * @param fullList the complete list of objects + * @param pageable + * @return list of page objects + * @param + */ + private List getListSlice(List fullList, Pageable pageable) { + int total = fullList.size(); + List pageContent = null; + if (pageable.getOffset() + pageable.getPageSize() > total) { + pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), total); + } else { + pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), + Math.toIntExact(pageable.getOffset()) + pageable.getPageSize()); + } + return pageContent; + } + /** * @param rel * @param domainClass diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/scheduler/eperson/RegistrationDataScheduler.java b/dspace-server-webapp/src/main/java/org/dspace/app/scheduler/eperson/RegistrationDataScheduler.java new file mode 100644 index 
000000000000..49ceeba0dc9c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/scheduler/eperson/RegistrationDataScheduler.java @@ -0,0 +1,60 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.scheduler.eperson; + +import java.sql.SQLException; + +import org.dspace.core.Context; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.service.RegistrationDataService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Service; + +/** + * Contains all the schedulable task related to {@link RegistrationData} entities. + * Can be enabled via the configuration property {@code eperson.registration-data.scheduler.enabled} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +@Service +@ConditionalOnProperty(prefix = "eperson.registration-data.scheduler", name = "enabled", havingValue = "true") +public class RegistrationDataScheduler { + + private static final Logger log = LoggerFactory.getLogger(RegistrationDataScheduler.class); + + @Autowired + private RegistrationDataService registrationDataService; + + /** + * Deletes expired {@link RegistrationData}. + * This task is scheduled to be run by the cron expression defined in the configuration file. 
+ * + */ + @Scheduled(cron = "${eperson.registration-data.scheduler.expired-registration-data.cron:-}") + protected void deleteExpiredRegistrationData() throws SQLException { + Context context = new Context(); + context.turnOffAuthorisationSystem(); + try { + + registrationDataService.deleteExpiredRegistrations(context); + + context.restoreAuthSystemState(); + context.complete(); + } catch (Exception e) { + context.abort(); + log.error("Failed to delete expired registrations", e); + throw e; + } + } + + +} diff --git a/dspace-server-webapp/src/main/resources/application.properties b/dspace-server-webapp/src/main/resources/application.properties index a10e0f98a00d..0c26d530b74c 100644 --- a/dspace-server-webapp/src/main/resources/application.properties +++ b/dspace-server-webapp/src/main/resources/application.properties @@ -37,6 +37,12 @@ # NOTE: this configuration is filled out by Apache Ant during the DSpace install/update process. It does NOT # interact with or read its configuration from dspace.cfg. 
dspace.dir=${dspace.dir} + +######################## +# Servlet context path configuration for spring boot application running with embedded tomcat +# +server.servlet.context-path=/server + ######################## # Jackson serialization settings # diff --git a/dspace-server-webapp/src/main/webapp/index.html b/dspace-server-webapp/src/main/resources/static/index.html similarity index 100% rename from dspace-server-webapp/src/main/webapp/index.html rename to dspace-server-webapp/src/main/resources/static/index.html diff --git a/dspace-server-webapp/src/main/webapp/js/hal/http/client.js b/dspace-server-webapp/src/main/resources/static/js/hal/http/client.js similarity index 100% rename from dspace-server-webapp/src/main/webapp/js/hal/http/client.js rename to dspace-server-webapp/src/main/resources/static/js/hal/http/client.js diff --git a/dspace-server-webapp/src/main/webapp/js/vendor/CustomPostForm.js b/dspace-server-webapp/src/main/resources/static/js/vendor/CustomPostForm.js similarity index 100% rename from dspace-server-webapp/src/main/webapp/js/vendor/CustomPostForm.js rename to dspace-server-webapp/src/main/resources/static/js/vendor/CustomPostForm.js diff --git a/dspace-server-webapp/src/main/webapp/login.html b/dspace-server-webapp/src/main/resources/static/login.html similarity index 100% rename from dspace-server-webapp/src/main/webapp/login.html rename to dspace-server-webapp/src/main/resources/static/login.html diff --git a/dspace-server-webapp/src/main/webapp/styles.css b/dspace-server-webapp/src/main/resources/static/styles.css similarity index 100% rename from dspace-server-webapp/src/main/webapp/styles.css rename to dspace-server-webapp/src/main/resources/static/styles.css diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml index eca9acf79fd7..78d4eb9171c4 100644 --- 
a/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml @@ -44,7 +44,7 @@ - + @@ -106,14 +106,14 @@ submission - - submit.progressbar.CClicense org.dspace.app.rest.submit.step.CCLicenseStep @@ -121,7 +121,7 @@ @@ -130,9 +130,9 @@ diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml new file mode 100644 index 000000000000..1c9f4e821df2 --- /dev/null +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -0,0 +1,4902 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + crisrp.name + crisrp.name.variant + crisrp.name.translated + person.givenName + person.familyName + + + + + + + + + + + dc.contributor.author + dc.contributor.editor + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.rights + + + + + + + + + + + + + + + dc.rights + + + + + + + + dc.description.provenance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.test.parentcommunity1field + + + + + + + + + + + + + + + dc.test.subcommunity11field + + + + + + + + + + + + + + + dc.test.collection111field + + + + + + + + + + + + + + + 
dc.test.collection121field + + + + + + + + + + + + + + + dc.test.subcommunity21field + + + + + + + + + + + + + + dc.test.collection211field + + + + + + + + + + + + + + dc.test.collection221field + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true + + withdrawn:true OR discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:WorkspaceItem OR 
search.resourcetype:XmlWorkflowItem + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:WorkspaceItem AND supervised:true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:PoolTask OR search.resourcetype:ClaimedTask + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:XmlWorkflowItem + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND 
entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:OrgUnit + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalIssue + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:JournalIssue + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalVolume + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:JournalVolume + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Journal + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
search.resourcetype:Item AND entityType_keyword:Journal + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND (entityType_keyword:OrgUnit OR entityType_keyword:Person) + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit AND dc.type:FundingOrganization + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item + search.entitytype:${researcher-profile.entity-type:Person} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + projectinvestigators_authority:{0} AND search.resourcetype:Item + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + author_authority:{0} AND search.resourcetype:Item + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + '{'!join from=search.resourceid to=projectinvestigators_authority fromIndex=${solr.multicorePrefix}search'}'person.affiliation.name_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.description.sponsorship_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + '{'!join from=search.resourceid to=author_authority fromIndex=${solr.multicorePrefix}search'}'person.affiliation.name_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + person.affiliation.name_authority:{0} + -withdrawn:true AND 
-discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + involvedorganisation_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + organization.parentOrganization_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.relation.project_authority:{0} AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.relation.project_authority:{0} AND entityType_keyword:Funding + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.relation.project_authority:{0} AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + location.coll:{0} + + + + + + + + + + + location.comm:{0} + + + + + + + + + + + search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND (entityType_keyword:Publication OR entityType_keyword:Patent OR entityType_keyword:Product) + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Collection + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + + + + + + + + + + + relation.isAuthorOfPublication.latestForDiscovery + + + + + + + + + + + 
relation.isProjectOfPublication.latestForDiscovery + + + + + + + + + + + + relation.isOrgUnitOfPublication.latestForDiscovery + + + + + + + + + + + relation.isPublicationOfJournalIssue.latestForDiscovery + + + + + + + + + + + relation.isJournalOfPublication.latestForDiscovery + + + + + + + + + + + dc.contributor.author + dc.creator + + + + + + + + + + + + + + + dspace.entity.type + + + + + + + + + + + + + + dc.subject.* + + + + + + + + + + + + + + dc.date.issued + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.type + + + + + + + + + dc.identifier + + + + + + + + + placeholder.placeholder.placeholder + + + + + + + + + + placeholder.placeholder.placeholder + + + + + + + + + person.jobTitle + + + + + + + + + + + + + + + person.knowsLanguage + + + + + + + + + + + + + person.birthDate + + + + + + + + + + + + + + + + + person.familyName + + + + + + + + + + + person.givenName + + + + + + + + + + + relation.isOrgUnitOfPerson.latestForDiscovery + + + + + + + + + + + relation.isProjectOfPerson.latestForDiscovery + + + + + + + + + + + relation.isPublicationOfAuthor.latestForDiscovery + + + + + + + + + + + + organization.address.addressCountry + + + + + + + + + + + + + + + organization.address.addressLocality + + + + + + + + + + + + + + + organization.foundingDate + + + + + + + + + + + + + + + + organization.legalName + + + + + + + + + + + relation.isPersonOfOrgUnit.latestForDiscovery + + + + + + + + + + + relation.isProjectOfOrgUnit.latestForDiscovery + + + + + + + + + + + relation.isPublicationOfOrgUnit.latestForDiscovery + + + + + + + + + + + creativework.keywords + + + + + + + + + + + + + + + creativework.datePublished + + + + + + + + + + + + + + + + publicationissue.issueNumber + + + + + + + + + + + relation.isPublicationOfJournalIssue.latestForDiscovery + + + + + + + + + + + publicationVolume.volumeNumber + + + + + + + + + + + relation.isIssueOfJournalVolume.latestForDiscovery + + + + + + + + + + + 
relation.isJournalOfVolume.latestForDiscovery + + + + + + + + + + + creativework.publisher + + + + + + + + + + + + + + + creativework.editor + + + + + + + + + + + + + + + relation.isVolumeOfJournal.latestForDiscovery + + + + + + + + + + + + + + placeholder.placeholder.placeholder + + + + + + + + + + relation.isOrgUnitOfProject.latestForDiscovery + + + + + + + + + + + + relation.isPersonOfProject.latestForDiscovery + + + + + + + + + + + + relation.isPublicationOfProject.latestForDiscovery + + + + + + + + + + + relation.isContributorOfPublication.latestForDiscovery + + + + + + + + + + + relation.isPublicationOfContributor.latestForDiscovery + + + + + + + + + + + relation.isFundingAgencyOfProject.latestForDiscovery + + + + + + + + + + + relation.isProjectOfFundingAgency.latestForDiscovery + + + + + + + + + + + + + placeholder.placeholder.placeholder + + + + + + + + + crispj.investigator + crispj.coinvestigators + + + + + + + + + + + + + + + dc.contributor.editor + dc.creator + + + + + + + + + + + + + + + oairecerif.author.affiliation + oairecerif.editor.affiliation + + + + + + + + + + + + + + + dc.relation.funding + + + + + + + + + + + + + + + person.identifier.orcid + + + + + + + + + + + cris.legacyId + + + + + + + + + + + dc.identifier.doi + + + + + + + + + + + dc.type + + + + + + + + + dc.language.iso + person.knowsLanguage + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + relation.isProjectsSelectedFor + {0} + + + + + + + + + + + + relation.isResearchoutputsSelectedFor + {0} + + + + + + + + + + + + relation.isRpprojectsSelectedFor + {0} + + + + + + + + + + + + relation.isPublicationsSelectedFor + {0} + + + + + + + + + + + + relation.isRppublicationsSelectedFor + {0} + + + + + + + + + + + + relation.isPeopleSelectedFor + {0} + + + + + + + + + + + + relation.isOrganizationsSelectedFor + {0} + + + + + + + + + + + + 
relation.isGrantsSelectedFor + {0} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.type + + + + + + + + + + + + + + + + + + + + + dc.date.issued + + + + + + + + + diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml index b40cfa1704a5..1f668b9e1616 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml @@ -63,4 +63,9 @@ + + + + + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java b/dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java new file mode 100644 index 000000000000..8db55b6dedd1 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java @@ -0,0 +1,22 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app; + +import org.dspace.app.rest.WebApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +/** + * Spring boot application for integration tests. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@SpringBootApplication(scanBasePackageClasses = WebApplication.class) +public class TestApplication { + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java new file mode 100644 index 000000000000..da0f90ca97c7 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java @@ -0,0 +1,502 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static com.jayway.jsonpath.JsonPath.read; +import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.Assert.assertTrue; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter; +import org.dspace.app.rest.model.ParameterValueRest; +import org.dspace.app.rest.model.ProcessRest; +import org.dspace.app.rest.model.ScriptRest; +import org.dspace.app.rest.projection.Projection; 
+import org.dspace.app.rest.test.AbstractEntityIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.ProcessBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.Process; +import org.dspace.scripts.service.ProcessService; +import org.junit.After; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.mock.web.MockMultipartFile; + +/** + * Basic integration testing for the bulk access Import feature via UI {@link BulkAccessControl}. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.com) + */ +public class BulkAccessControlScriptIT extends AbstractEntityIntegrationTest { + + @Autowired + private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; + + @Autowired + private GroupService groupService; + + @Autowired + private ProcessService processService; + + private final static String SCRIPTS_ENDPOINT = "/api/" + ScriptRest.CATEGORY + "/" + ScriptRest.PLURAL_NAME; + private final static String CURATE_SCRIPT_ENDPOINT = SCRIPTS_ENDPOINT + "/bulk-access-control/" + + ProcessRest.PLURAL_NAME; + + @After + @Override + public void destroy() throws Exception { + List processes = processService.findAll(context); + for (Process process : processes) { + ProcessBuilder.deleteProcess(process.getID()); + } + + super.destroy(); + } + + @Test + public void bulkAccessScriptWithAdminUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community 
subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .withSubject("ExtraEntry") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", item.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetCommunityTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .withAdminGroup(eperson) + .build(); + + String json = "{ \"item\": 
{\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", parentCommunity.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetCollectionTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " 
]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", collection.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetItemTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .withSubject("ExtraEntry") + .withAdminUser(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; 
+ + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", item.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithMultipleTargetUuidsWithAdminUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collection) + .withTitle("Public item one") + .build(); + + Item itemTwo = ItemBuilder.createItem(context, collection) + .withTitle("Public item two") + .build(); + + Item itemThree = ItemBuilder.createItem(context, collection) + .withTitle("Public item three") + .build(); + + String json = "{ 
\"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", itemOne.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-u", itemTwo.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-u", itemThree.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + + itemOne = context.reloadEntity(itemOne); + itemTwo = context.reloadEntity(itemTwo); + itemThree = context.reloadEntity(itemThree); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(itemOne.getResourcePolicies(), hasSize(1)); + assertThat(itemTwo.getResourcePolicies(), hasSize(1)); + assertThat(itemThree.getResourcePolicies(), hasSize(1)); + + assertThat(itemOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + assertThat(itemTwo.getResourcePolicies(), hasItem( + 
matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + assertThat(itemThree.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithoutTargetUUIDParameterTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .param("properties", new ObjectMapper().writeValueAsString(List.of())) + ) + .andExpect(status().isInternalServerError()) + .andExpect(result -> assertTrue(result.getResolvedException() + .getMessage() + .contains("At least one target uuid must be provided"))); + } + + @Test + public void bulkAccessScriptWithNormalUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", parentCommunity.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + 
.collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isForbidden()); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java index d839ab81fba9..f3bbae17be17 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java @@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; @@ -82,6 +83,7 @@ public class ItemImportIT extends AbstractEntityIntegrationTest { private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; private Collection collection; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -126,6 +128,10 @@ public void importItemByZipSafWithBitstreams() throws Exception { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java index 
ac03e946e320..1ddea619d2fc 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java @@ -249,4 +249,24 @@ public void serviceDocumentTest() throws Exception { */ } + + @Test + public void emptyDescriptionTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection collection1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1") + .build(); + + getClient().perform(get("/opensearch/search") + .param("format", "rss") + .param("scope", collection1.getID().toString()) + .param("query", "*")) + .andExpect(status().isOk()) + .andExpect(xpath("rss/channel/description").string("No Description")); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java index e4ed507d473c..05dab9905681 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java @@ -9,6 +9,7 @@ import static com.jayway.jsonpath.JsonPath.read; import static java.lang.Thread.sleep; +import static org.dspace.app.rest.matcher.GroupMatcher.matchGroupWithName; import static org.dspace.app.rest.security.StatelessAuthenticationFilter.ON_BEHALF_OF_REQUEST_PARAM; import static org.dspace.app.rest.utils.RegexUtils.REGEX_UUID; import static org.hamcrest.MatcherAssert.assertThat; @@ -55,6 +56,7 @@ import org.dspace.app.rest.matcher.AuthenticationStatusMatcher; import org.dspace.app.rest.matcher.AuthorizationMatcher; import 
org.dspace.app.rest.matcher.EPersonMatcher; +import org.dspace.app.rest.matcher.GroupMatcher; import org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.model.EPersonRest; import org.dspace.app.rest.projection.DefaultProjection; @@ -105,6 +107,7 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio public static final String[] PASS_ONLY = {"org.dspace.authenticate.PasswordAuthentication"}; public static final String[] SHIB_ONLY = {"org.dspace.authenticate.ShibAuthentication"}; + public static final String[] ORCID_ONLY = { "org.dspace.authenticate.OrcidAuthentication" }; public static final String[] SHIB_AND_PASS = { "org.dspace.authenticate.ShibAuthentication", "org.dspace.authenticate.PasswordAuthentication" @@ -113,6 +116,10 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio "org.dspace.authenticate.IPAuthentication", "org.dspace.authenticate.ShibAuthentication" }; + public static final String[] PASS_AND_IP = { + "org.dspace.authenticate.PasswordAuthentication", + "org.dspace.authenticate.IPAuthentication" + }; // see proxies.trusted.ipranges in local.cfg public static final String TRUSTED_IP = "7.7.7.7"; @@ -172,6 +179,101 @@ public void testStatusAuthenticatedAsAdmin() throws Exception { .andExpect(status().isNoContent()); } + /** + * This test verifies: + * - that a logged in via password user finds the expected specialGroupPwd in _embedded.specialGroups; + * - that a logged in via password and specific IP user finds the expected specialGroupPwd and specialGroupIP + * in _embedded.specialGroups; + * - that a not logged in user with a specific IP finds the expected specialGroupIP in _embedded.specialGroups; + * @throws Exception + */ + @Test + public void testStatusGetSpecialGroups() throws Exception { + context.turnOffAuthorisationSystem(); + + Group specialGroupPwd = GroupBuilder.createGroup(context) + .withName("specialGroupPwd") + .build(); + Group specialGroupIP = 
GroupBuilder.createGroup(context) + .withName("specialGroupIP") + .build(); + + configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", PASS_AND_IP); + configurationService.setProperty("authentication-password.login.specialgroup","specialGroupPwd"); + configurationService.setProperty("authentication-ip.specialGroupIP", "123.123.123.123"); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get("/api/authn/status").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("password"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd")))); + + // try the special groups link endpoint in the same scenario than above + getClient(token).perform(get("/api/authn/status/specialGroups").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd")))); + + getClient(token).perform(get("/api/authn/status").param("projection", "full") + .with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", 
is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("password"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd"), + GroupMatcher.matchGroupWithName("specialGroupIP")))); + + // try the special groups link endpoint in the same scenario than above + getClient(token).perform(get("/api/authn/status/specialGroups").param("projection", "full") + .with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd"), + GroupMatcher.matchGroupWithName("specialGroupIP")))); + + getClient().perform(get("/api/authn/status").param("projection", "full").with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + // fails due to bug https://github.com/DSpace/DSpace/issues/8274 + //.andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(false))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder(GroupMatcher.matchGroupWithName("specialGroupIP")))); + + // try the special groups link endpoint in the same scenario than above + getClient().perform(get("/api/authn/status/specialGroups").param("projection", "full") + .with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder( + 
GroupMatcher.matchGroupWithName("specialGroupIP")))); + } + @Test @Ignore // Ignored until an endpoint is added to return all groups. Anonymous is not considered a direct group. @@ -1492,60 +1594,6 @@ public void testGenerateMachineTokenToDownloadBitstream() throws Exception { } - @Test - public void testGenerateMachineTokenWithSpecialGroups() throws Exception { - context.turnOffAuthorisationSystem(); - - EPerson user = EPersonBuilder.createEPerson(context) - .withCanLogin(true) - .withPassword(password) - .withEmail("myuser@test.com") - .build(); - - Group specialGroup = GroupBuilder.createGroup(context) - .withName("Special group") - .build(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent community") - .build(); - - Collection collection = CollectionBuilder.createCollection(context, parentCommunity) - .withName("Collection") - .build(); - - Item item = ItemBuilder.createItem(context, collection) - .withReaderGroup(specialGroup) - .build(); - - context.restoreAuthSystemState(); - - String token = getAuthToken(user.getEmail(), password); - - getClient(token).perform(get("/api/core/items/" + item.getID())) - .andExpect(status().isForbidden()); - - configurationService.setProperty("authentication-password.login.specialgroup", "Special group"); - - token = getAuthToken(user.getEmail(), password); - - configurationService.setProperty("authentication-password.login.specialgroup", null); - - getClient(token).perform(get("/api/core/items/" + item.getID())) - .andExpect(status().isOk()); - - AtomicReference machineToken = new AtomicReference<>(); - - getClient(token).perform(post("/api/authn/machinetokens")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.token", notNullValue())) - .andExpect(jsonPath("$.type", is("machinetoken"))) - .andDo(result -> machineToken.set(read(result.getResponse().getContentAsString(), "$.token"))); - - getClient(machineToken.get()).perform(get("/api/core/items/" + item.getID())) - 
.andExpect(status().isOk()); - } - @Test public void testGenerateMachineTokenWithAnonymousUser() throws Exception { @@ -1648,6 +1696,71 @@ public void testDeleteMachineTokenWithAnonymousUser() throws Exception { .andExpect(status().isUnauthorized()); } + @Test + public void testAreSpecialGroupsApplicable() throws Exception { + context.turnOffAuthorisationSystem(); + + GroupBuilder.createGroup(context) + .withName("specialGroupPwd") + .build(); + GroupBuilder.createGroup(context) + .withName("specialGroupShib") + .build(); + + configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", SHIB_AND_PASS); + configurationService.setProperty("authentication-password.login.specialgroup", "specialGroupPwd"); + configurationService.setProperty("authentication-shibboleth.role.faculty", "specialGroupShib"); + configurationService.setProperty("authentication-shibboleth.default-roles", "faculty"); + + context.restoreAuthSystemState(); + + String passwordToken = getAuthToken(eperson.getEmail(), password); + + getClient(passwordToken).perform(get("/api/authn/status").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("password"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupPwd")))); + + getClient(passwordToken).perform(get("/api/authn/status/specialGroups").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + 
.andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupPwd")))); + + String shibToken = getClient().perform(post("/api/authn/login") + .requestAttr("SHIB-MAIL", eperson.getEmail()) + .requestAttr("SHIB-SCOPED-AFFILIATION", "faculty;staff")) + .andExpect(status().isOk()) + .andReturn().getResponse().getHeader(AUTHORIZATION_HEADER).replace(AUTHORIZATION_TYPE, ""); + + getClient(shibToken).perform(get("/api/authn/status").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupShib")))); + + getClient(shibToken).perform(get("/api/authn/status/specialGroups").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupShib")))); + } + // Get a short-lived token based on an active login token private String getShortLivedToken(String loginToken) throws Exception { ObjectMapper mapper = new ObjectMapper(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java index 67185f2cdab2..6a51649b5bce 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java @@ -2796,7 +2796,6 @@ public void verifySpecialGroupForNonAdministrativeUsersTest() throws Exception { simpleArticle.getInputStream()) .withIssueDate("2022-07-15") .withSubject("Entry") - .withEntityType("Publication") .grantLicense() .build(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java index 1a6cc29ca75c..fd128269308d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java @@ -56,7 +56,7 @@ public class BitstreamFormatRestRepositoryIT extends AbstractControllerIntegrati @Autowired private BitstreamFormatConverter bitstreamFormatConverter; - private final int DEFAULT_AMOUNT_FORMATS = 81; + private final int DEFAULT_AMOUNT_FORMATS = 85; @Test public void findAllPaginationTest() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index bc62143ccdf7..92cff6db2192 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -17,12 +17,14 @@ import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy; import static org.dspace.content.BitstreamFormat.KNOWN; import static org.dspace.content.BitstreamFormat.SUPPORTED; +import static org.dspace.core.Constants.DEFAULT_BITSTREAM_READ; import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.WRITE; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static 
org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; @@ -34,6 +36,7 @@ import static org.springframework.http.MediaType.parseMediaType; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.head; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header; @@ -56,6 +59,7 @@ import org.apache.pdfbox.text.PDFTextStripper; import org.apache.solr.client.solrj.SolrServerException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; @@ -70,6 +74,7 @@ import org.dspace.content.Item; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; import org.dspace.disseminate.CitationDocumentServiceImpl; import org.dspace.eperson.EPerson; @@ -95,6 +100,8 @@ */ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest { + public static final String[] PASS_ONLY = {"org.dspace.authenticate.PasswordAuthentication"}; + protected SolrLoggerService solrLoggerService = StatisticsServiceFactory.getInstance().getSolrLoggerService(); @Autowired @@ -112,6 +119,12 @@ public class BitstreamRestControllerIT extends 
AbstractControllerIntegrationTest @Autowired private BitstreamFormatService bitstreamFormatService; + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private CollectionService collectionService; + private Bitstream bitstream; private BitstreamFormat supportedFormat; private BitstreamFormat knownFormat; @@ -626,6 +639,54 @@ public void testPrivateBitstream() throws Exception { } + @Test + public void testBitstreamDefaultReadInheritanceFromCollection() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community and one collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + // Explicitly create a restrictive default bitstream read policy on the collection + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + authorizeService.removePoliciesActionFilter(context, col1, DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, col1, DEFAULT_BITSTREAM_READ, internalGroup); + + //2. 
A public item with a new bitstream that is not explicitly restricted + // but should instead inherit + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + // make sure this item has no default policies for a new bundle to inherit + authorizeService.removePoliciesActionFilter(context, publicItem1, DEFAULT_BITSTREAM_READ); + + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Test Restricted Bitstream") + .withDescription("This bitstream is restricted") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + //** WHEN ** + //We download the bitstream + getClient().perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + //** THEN ** + .andExpect(status().isUnauthorized()); + + //An unauthorized request should not log statistics + checkNumberOfStatsRecords(bitstream, 0); + } + @Test public void restrictedGroupBitstreamForbiddenTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -686,52 +747,68 @@ public void restrictedGroupBitstreamForbiddenTest() throws Exception { @Test public void restrictedSpecialGroupBitstreamTest() throws Exception { - context.turnOffAuthorisationSystem(); - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); + String authenticationMethod = + configurationService.getProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod"); - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) - .withName("Collection 1") - .build(); + try { - Group restrictedGroup = GroupBuilder.createGroup(context) - .withName("Restricted Group") - .build(); + 
configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", PASS_ONLY); - String bitstreamContent = "Private!"; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + context.turnOffAuthorisationSystem(); - Item item = ItemBuilder.createItem(context, col1) - .withTitle("item 1") - .withIssueDate("2013-01-17") - .withAuthor("Doe, John") - .build(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); - bitstream = BitstreamBuilder - .createBitstream(context, item, is) - .withName("Test Embargoed Bitstream") - .withDescription("This bitstream is embargoed") - .withMimeType("text/plain") - .withReaderGroup(restrictedGroup) - .build(); - } + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); - context.restoreAuthSystemState(); + Group restrictedGroup = GroupBuilder.createGroup(context) + .withName("Restricted Group") + .build(); - String authToken = getAuthToken(eperson.getEmail(), password); - getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) - .andExpect(status().isForbidden()); + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - configurationService.setProperty("authentication-password.login.specialgroup", "Restricted Group"); + Item item = ItemBuilder.createItem(context, col1) + .withTitle("item 1") + .withIssueDate("2013-01-17") + .withAuthor("Doe, John") + .build(); - authToken = getAuthToken(eperson.getEmail(), password); - getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) - .andExpect(status().isOk()); + bitstream = BitstreamBuilder + .createBitstream(context, item, is) + .withName("Test Embargoed Bitstream") + .withDescription("This bitstream is embargoed") + .withMimeType("text/plain") + 
.withReaderGroup(restrictedGroup) + .build(); + } - checkNumberOfStatsRecords(bitstream, 1); + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isForbidden()); + getClient(authToken).perform(post("/api/authn/logout")).andExpect(status().isNoContent()); + + configurationService.setProperty("authentication-password.login.specialgroup", "Restricted Group"); + + authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + + checkNumberOfStatsRecords(bitstream, 1); + + } finally { + configurationService.setProperty( + "plugin.sequence.org.dspace.authenticate.AuthenticationMethod", + authenticationMethod + ); + } } @Test @@ -1265,4 +1342,57 @@ public void testEmbargoedBitstreamWithCrisSecurity() throws Exception { checkNumberOfStatsRecords(bitstream, 2); } + + @Test + public void checkContentDispositionOfFormats() throws Exception { + configurationService.setProperty("webui.content_disposition_format", new String[] { + "text/richtext", + "text/xml", + "txt" + }); + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).build(); + String content = "Test Content"; + Bitstream rtf; + Bitstream xml; + Bitstream txt; + Bitstream html; + try (InputStream is = IOUtils.toInputStream(content, CharEncoding.UTF_8)) { + rtf = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/richtext").build(); + xml = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/xml").build(); + txt = BitstreamBuilder.createBitstream(context, item, is) + 
.withMimeType("text/plain").build(); + html = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/html").build(); + } + context.restoreAuthSystemState(); + + // these formats are configured and files should be downloaded + verifyBitstreamDownload(rtf, "text/richtext;charset=UTF-8", true); + verifyBitstreamDownload(xml, "text/xml;charset=UTF-8", true); + verifyBitstreamDownload(txt, "text/plain;charset=UTF-8", true); + // this format is not configured and should open inline + verifyBitstreamDownload(html, "text/html;charset=UTF-8", false); + } + + private void verifyBitstreamDownload(Bitstream file, String contentType, boolean shouldDownload) throws Exception { + String token = getAuthToken(admin.getEmail(), password); + String header = getClient(token).perform(get("/api/core/bitstreams/" + file.getID() + "/content") + .header("Accept", contentType)) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andReturn().getResponse().getHeader("content-disposition"); + if (shouldDownload) { + assertTrue(header.contains("attachment")); + assertFalse(header.contains("inline")); + } else { + assertTrue(header.contains("inline")); + assertFalse(header.contains("attachment")); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index b850d973e4f3..f3aee57f5190 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -7,10 +7,13 @@ */ package org.dspace.app.rest; +import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_OK; import static org.apache.commons.codec.CharEncoding.UTF_8; import static org.apache.commons.io.IOUtils.toInputStream; import static 
org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE; import static org.dspace.core.Constants.WRITE; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.Matchers.contains; @@ -19,6 +22,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; @@ -27,9 +31,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.InputStream; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.UUID; +import javax.ws.rs.core.MediaType; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -39,6 +45,7 @@ import org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; @@ -61,16 +68,21 @@ import org.dspace.content.WorkspaceItem; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import 
org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.util.UUIDUtils; import org.hamcrest.Matchers; +import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.MvcResult; public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest { @@ -89,6 +101,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest @Autowired private ItemService itemService; + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + @Test public void findAllTest() throws Exception { //We turn off the authorization system in order to create the structure as defined below @@ -346,7 +364,7 @@ public void findOneBitstreamTest_EmbargoedBitstream_Anon() throws Exception { Item publicItem1; Bitstream bitstream; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, org.apache.commons.lang3.CharEncoding.UTF_8)) { + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { publicItem1 = ItemBuilder.createItem(context, col1) .withTitle("Public item 1") @@ -1320,6 +1338,7 @@ public void patchReplaceMultipleDescriptionBitstream() throws Exception { bitstream = BitstreamBuilder. 
createBitstream(context, publicItem1, is) .withName("Bitstream") + //.withMimeType("text/plain") .build(); } @@ -1742,6 +1761,53 @@ public void thumbnailEndpointTest() throws Exception { .andExpect(jsonPath("$.type", is("bitstream"))); } + @Test + public void thumbnailEndpointTestWithSpecialCharactersInFileName() throws Exception { + // Given an Item + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1").build(); + + Item item = ItemBuilder.createItem(context, col1) + .withTitle("Test item -- thumbnail") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + + Bundle originalBundle = BundleBuilder.createBundle(context, item) + .withName(Constants.DEFAULT_BUNDLE_NAME) + .build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item) + .withName("THUMBNAIL") + .build(); + + InputStream is = IOUtils.toInputStream("dummy", "utf-8"); + + // With an ORIGINAL Bitstream & matching THUMBNAIL Bitstream containing special characters in filenames + Bitstream bitstream = BitstreamBuilder.createBitstream(context, originalBundle, is) + .withName("test (2023) file.pdf") + .withMimeType("application/pdf") + .build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, is) + .withName("test (2023) file.pdf.jpg") + .withMimeType("image/jpeg") + .build(); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/thumbnail")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.uuid", Matchers.is(thumbnail.getID().toString()))) + .andExpect(jsonPath("$.type", is("bitstream"))); + } + @Test public void thumbnailEndpointMultipleThumbnailsWithPrimaryBitstreamTest() 
throws Exception { // Given an Item @@ -2925,4 +2991,514 @@ public void findShowableByItem() throws Exception { ); } + @Test + public void deleteBitstreamsInBulk() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + 
bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Verify that only the three bitstreams were deleted and the fourth one still exists + Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3)); + Assert.assertTrue(bitstreamExists(token, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidUUID() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + 
.withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // For the third bitstream, use an invalid UUID + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + UUID randomUUID = UUID.randomUUID(); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + MvcResult result = getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()) + .andReturn(); + + // Verify our custom error message is returned when an invalid UUID is used + assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository", + result.getResponse().getErrorMessage()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = 
ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // But set the rest.patch.operations.limit property to 2, so that the request is invalid + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + 
.andExpect(status().isBadRequest()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_Unauthorized() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + 
bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient().perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + } + + @Test + public void deleteBitstreamsInBulk_Forbidden() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new 
RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void deleteBitstreamsInBulk_collectionAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson col1Admin = EPersonBuilder.createEPerson(context) + .withEmail("col1admin@test.com") + .withPassword(password) + .build(); + EPerson col2Admin = EPersonBuilder.createEPerson(context) + .withEmail("col2admin@test.com") + .withPassword(password) + .build(); + Group col1_AdminGroup = collectionService.createAdministrators(context, col1); + Group col2_AdminGroup = collectionService.createAdministrators(context, col2); + groupService.addMember(context, col1_AdminGroup, col1Admin); + groupService.addMember(context, col2_AdminGroup, col2Admin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream 
bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(col1Admin.getEmail(), password); + // Should return forbidden since one of the bitstreams does not originate form collection 1 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(2); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Change the token to the admin of 
collection 2 + token = getAuthToken(col2Admin.getEmail(), password); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + ops = new ArrayList<>(); + removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp1); + removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp2); + removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID()); + ops.add(removeOp3); + patchBody = getPatchContent(ops); + + // Should return forbidden since one of the bitstreams does not originate form collection 2 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(0); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + @Test + public void deleteBitstreamsInBulk_communityAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context) + .withEmail("parentComAdmin@test.com") + .withPassword(password) + .build(); + Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity); + groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin); + Item publicItem1 = ItemBuilder.createItem(context, 
col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(parentCommunityAdmin.getEmail(), password); + // Bitstreams originate from two different collections, but those collections live in the same community, so + // a community admin should be able to delete them + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + public 
boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_OK) { + return false; + } + } + return true; + } + + public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_NOT_FOUND) { + return false; + } + } + return true; + } + + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 427235e1367a..41ee6be0e17e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -63,15 +64,15 @@ public void findAll() throws Exception { //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) - //Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page + //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page // object .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(11))) + .andExpect(jsonPath("$.page.totalElements", is(14))) .andExpect(jsonPath("$.page.totalPages", is(1))) 
.andExpect(jsonPath("$.page.number", is(0))) //The array of browse index should have a size 4 - .andExpect(jsonPath("$._embedded.browses", hasSize(11))) + .andExpect(jsonPath("$._embedded.browses", hasSize(14))) //Check that all (and only) the default browse indexes are present .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( @@ -81,11 +82,18 @@ public void findAll() throws Exception { BrowseIndexMatcher.subjectBrowseIndex("asc"), BrowseIndexMatcher.rodeptBrowseIndex("asc"), BrowseIndexMatcher.typeBrowseIndex("asc"), + BrowseIndexMatcher.rpdeptBrowseIndex("asc"), BrowseIndexMatcher.rpnameBrowseIndex("asc"), BrowseIndexMatcher.ounameBrowseIndex("asc"), + BrowseIndexMatcher.eqtitleBrowseIndex("asc"), + BrowseIndexMatcher.typesBrowseIndex(), BrowseIndexMatcher.pjtitleBrowseIndex("asc"), - BrowseIndexMatcher.rpdeptBrowseIndex("asc"), - BrowseIndexMatcher.eqtitleBrowseIndex("asc") + BrowseIndexMatcher.hierarchicalBrowseIndex( + "publication-coar-types", "itemtype", "dc.type" + ), + BrowseIndexMatcher.hierarchicalBrowseIndex( + "srsc", "subject", "dc.subject" + ) ))) ; } @@ -132,6 +140,25 @@ public void findBrowseByContributor() throws Exception { ; } + @Test + public void findBrowseByVocabulary() throws Exception { + //Use srsc as this vocabulary is included by default + //When we call the root endpoint + getClient().perform(get("/api/discover/browses/srsc")) + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //Check that the JSON root matches the expected browse index + .andExpect( + jsonPath( + "$", + BrowseIndexMatcher.hierarchicalBrowseIndex("srsc", "subject", "dc.subject") + ) + ) + ; + } + @Test public void findBrowseBySubject() throws Exception { //When we call the root endpoint @@ -852,6 +879,7 @@ public void testPaginationBrowseByDateIssuedItems() throws Exception { ))); } + @Test public void 
testBrowseByEntriesStartsWith() throws Exception { context.turnOffAuthorisationSystem(); @@ -997,7 +1025,7 @@ public void testBrowseByEntriesStartsWith() throws Exception { //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=C"))); - }; + } @Test public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { @@ -1157,7 +1185,7 @@ public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=Guión"))); - }; + } @Test public void testBrowseByItemsStartsWith() throws Exception { @@ -1708,7 +1736,7 @@ public void findOneLinked() throws Exception { // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))) + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))) ; } @@ -1725,7 +1753,7 @@ public void findOneLinkedPassingTwoFields() throws Exception { // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))); + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BulkAccessConditionRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BulkAccessConditionRestRepositoryIT.java new file mode 100644 index 000000000000..ecca60c7e53f --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BulkAccessConditionRestRepositoryIT.java @@ -0,0 +1,256 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.app.rest; +import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.AccessConditionOptionMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Test; + +/** + * Integration test class for the bulkaccessconditionoptions endpoint. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionRestRepositoryIT extends AbstractControllerIntegrationTest { + + @Test + public void findAllByAdminUserTest() throws Exception { + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken) + .perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))) + .andExpect(jsonPath("$._embedded.bulkaccessconditionoptions", containsInAnyOrder(allOf( + hasJsonPath("$.id", is("default")), + hasJsonPath("$.itemAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + ), + hasJsonPath("$.bitstreamAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + ))))); + } + + @Test + public void findAllByAdminUserOfAnCommunityTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + // create community and assign eperson to admin group + CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + 
getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isOk()); + } + + @Test + public void findAllByAdminUserOfAnCollectionTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + // create collection and assign eperson to admin group + CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isOk()); + } + + @Test + public void findAllByAdminUserOfAnItemTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + // create item and assign eperson as admin user + ItemBuilder.createItem(context, collection) + .withTitle("item") + .withAdminUser(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isOk()); + } + + @Test + public void findAllByNormalUserTest() throws Exception { + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isForbidden()); + } + + @Test + public void findAllByAnonymousUserTest() throws Exception { + getClient().perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneByAdminTest() throws Exception { + String tokenAdmin = 
getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.id", is("default"))) + .andExpect(jsonPath("$.itemAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + )) + .andExpect(jsonPath("$.bitstreamAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + )) + .andExpect(jsonPath("$.type", is("bulkaccessconditionoption"))); + } + + @Test + public void findOneByAdminOfAnCommunityTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + // create community and assign eperson to admin group + CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + + getClient(authToken) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()); + } + + @Test + public void findOneByAdminOfAnCollectionTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + 
.withName("community") + .build(); + + // create collection and assign eperson to admin group + CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + + getClient(authToken) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()); + } + + @Test + public void findOneByAdminOfAnItemTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + // create item and assign eperson as admin user + ItemBuilder.createItem(context, collection) + .withTitle("item") + .withAdminUser(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()); + } + + @Test + public void findOneByNormalUserTest() throws Exception { + String tokenEPerson = getAuthToken(eperson.getEmail(), password); + getClient(tokenEPerson) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isForbidden()); + } + + @Test + public void findOneByAnonymousUserTest() throws Exception { + getClient().perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isUnauthorized()); + } + + + @Test + public void findOneNotFoundTest() throws Exception { + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions/wrong")) + .andExpect(status().isNotFound()); + } + +} \ No newline at end of file diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java index 735d0321ec84..1595fb1069ce 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java @@ -3450,128 +3450,6 @@ public void testAdminAuthorizedSearchUnauthenticated() throws Exception { .andExpect(status().isUnauthorized()); } - @Test - public void findAdministeredByEPersonAndAdmin() throws Exception { - setUpAuthorizedSearch(); - context.turnOffAuthorisationSystem(); - EPerson ePerson = EPersonBuilder.createEPerson(context) - .withNameInMetadata("Al", "Al") - .withEmail("epersonToBeTested@my.edu") - .withPassword(password) - .build(); - /** - * The Community/Collection structure for this test: - * - **** topLevelCommunity1 - * ├── subCommunity1 - * ├── subCommunity2 - * | └── collection1 - * └── collection2 - * └── collection3 - * └── collection4 - * - **** community2 - * └── collection5 - * └── collection6 - * └── collection7 - * - **** community3 - * └── collection8 - */ - - Community topLevelCommunity1 = CommunityBuilder.createCommunity(context) - .withName("topLevelCommunity1 is a very original name") - .withAdminGroup(admin) - .build(); - Community subCommunity1 = CommunityBuilder.createSubCommunity(context, topLevelCommunity1) - .withName("subCommunity1") - .withAdminGroup(ePerson) - .build(); - Community subCommunity2 = CommunityBuilder.createSubCommunity(context, topLevelCommunity1) - .withName("subCommunity2") - .withAdminGroup(ePerson) - .build(); - Community community2 = CommunityBuilder.createCommunity(context) - .withAdminGroup(ePerson) - .withAdminGroup(admin) - .withName("community2") - .build(); - Community community3 = CommunityBuilder.createCommunity(context) - .withName("community3") - .build(); - - // even if community has other admin 
group test should find collection administered by eperson - Collection collection1 = CollectionBuilder.createCollection(context, subCommunity2) - .withName("collection1 is a very original name") - .withAdminGroup(ePerson) - .build(); - Collection collection2 = CollectionBuilder.createCollection(context, topLevelCommunity1) - .withName("collection2 is a very original name") - .build(); - Collection collection3 = CollectionBuilder.createCollection(context, topLevelCommunity1) - .withName("collection3 is a very original name") - .build(); - Collection collection4 = CollectionBuilder.createCollection(context, topLevelCommunity1) - .withName("collection4 is a very original name") - .build(); - Collection collection5 = CollectionBuilder.createCollection(context, community2) - .withName("collection5 is a very original name") - .build(); - Collection collection6 = CollectionBuilder.createCollection(context, community2) - .withName("collection6 is a very original name") - .build(); - Collection collection7 = CollectionBuilder.createCollection(context, community2) - .withName("collection7 is a very original name") - .build(); - - Collection collection8 = CollectionBuilder.createCollection(context, community3) - .withName("collection8 is a very original name") - .build(); - - - context.restoreAuthSystemState(); - String token = getAuthToken(ePerson.getEmail(), password); - - // Verify the community admin gets all the communities he's admin for - getClient(token).perform(get("/api/core/collections/search/findAdminAuthorized")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.collections", Matchers.containsInAnyOrder( - CollectionMatcher.matchProperties(collection1.getName(), - collection1.getID(), collection1.getHandle()), - CollectionMatcher.matchProperties(collection5.getName(), - collection5.getID(), collection5.getHandle()), - CollectionMatcher.matchProperties(collection6.getName(), - collection6.getID(), collection6.getHandle()), - 
CollectionMatcher.matchProperties(collection7.getName(), - collection7.getID(), collection7.getHandle()) - ))); - - // Verify that admin can see all collections - String tokenAdmin = getAuthToken(admin.getEmail(), password); - getClient(tokenAdmin).perform(get("/api/core/collections/search/findAdminAuthorized")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.collections", Matchers.containsInAnyOrder( - CollectionMatcher.matchProperties(collectionA.getName(), - collectionA.getID(), collectionA.getHandle()), - CollectionMatcher.matchProperties(collection1.getName(), - collection1.getID(), collection1.getHandle()), - CollectionMatcher.matchProperties(collection2.getName(), - collection2.getID(), collection2.getHandle()), - CollectionMatcher.matchProperties(collection3.getName(), - collection3.getID(), collection3.getHandle()), - CollectionMatcher.matchProperties(collection4.getName(), - collection4.getID(), collection4.getHandle()), - CollectionMatcher.matchProperties(collection5.getName(), - collection5.getID(), collection5.getHandle()), - CollectionMatcher.matchProperties(collection6.getName(), - collection6.getID(), collection6.getHandle()), - CollectionMatcher.matchProperties(collection7.getName(), - collection7.getID(), collection7.getHandle()), - CollectionMatcher.matchProperties(collection8.getName(), - collection8.getID(), collection8.getHandle()) - ))); - } - @Test public void patchReplaceMultipleDescriptionCollection() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java index 34665592823e..685036833f06 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java @@ -126,7 +126,8 @@ public void setup() throws Exception { .withEntityType("Publication") 
.withWorkflowGroup("editor", admin) .withSubmitterGroup(eperson) - .withSubmissionDefinition("traditional-with-correction") + .withSubmissionDefinition("traditional") + .withCorrectionSubmissionDefinition("traditional-with-correction") .build(); date = "2020-02-20"; @@ -274,6 +275,101 @@ public void checkCorrection() throws Exception { } + @Test + public void checkCorrectionWithDuplicates() throws Exception { + + String tokenSubmitter = getAuthToken(eperson.getEmail(), password); + + //create a correction item + getClient(tokenSubmitter).perform(post("/api/submission/workspaceitems") + .param("owningCollection", collection.getID().toString()) + .param("relationship", "isCorrectionOfItem") + .param("item", itemToBeCorrected.getID().toString()) + .contentType(org.springframework.http.MediaType.APPLICATION_JSON)) + .andExpect(status().isCreated()) + .andDo(result -> workspaceItemIdRef.set(read(result.getResponse().getContentAsString(), "$.id"))); + + List relationshipList = relationshipService.findByItem(context, itemToBeCorrected); + assert relationshipList.size() > 0; + Item correctedItem = relationshipList.get(0).getLeftItem(); + WorkspaceItem newWorkspaceItem = workspaceItemService.findByItem(context,correctedItem); + + //make a change on the title + Map value = new HashMap(); + final String newDate = "2020-02-21"; + value.put("value", newDate); + List operations = new ArrayList(); + operations.add(new ReplaceOperation("/sections/traditionalpageone/dc.date.issued/0", value)); + String patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + final String newTitle = "New Title"; + value.put("value", newTitle); + operations = new ArrayList(); + operations.add(new ReplaceOperation("/sections/traditionalpageone/dc.title/0", 
value)); + patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + //remove subject + operations = new ArrayList(); + operations.add(new RemoveOperation("/sections/traditionalpagetwo/dc.subject/0")); + patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + //add an asbtract description + Map addValue = new HashMap(); + final String newDescription = "New Description"; + addValue.put("value", newDescription); + operations = new ArrayList(); + operations.add(new AddOperation("/sections/traditionalpagetwo/dc.description.abstract", List.of(addValue))); + patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + getClient(tokenSubmitter).perform(get("/api/submission/workspaceitems/" + newWorkspaceItem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sections.correction.metadata").doesNotExist()); + + AtomicReference workflowItemIdRef = new AtomicReference(); + + getClient(tokenSubmitter).perform(post("/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()) + .andDo(result -> workflowItemIdRef.set(read(result.getResponse().getContentAsString(), "$.id"))); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + 
//check if the correction is present + final String extraEntry = "ExtraEntry"; + getClient(tokenAdmin).perform(get("/api/workflow/workflowitems/" + workflowItemIdRef.get())) + //The status has to be 200 OK + .andExpect(status().isOk()) + //The array of browse index should have a size equals to 4 + .andExpect(jsonPath("$.sections.correction.metadata", hasSize(equalTo(4)))) + .andExpect(jsonPath("$.sections.correction.empty", is(false))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(newTitle)))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(newDate)))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(newDescription)))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(extraEntry)))); + + } + @Test public void checkEmptyCorrection() throws Exception { String tokenSubmitter = getAuthToken(eperson.getEmail(), password); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CreateWorkspaceItemFromExternalServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CreateWorkspaceItemFromExternalServiceIT.java index 79253dcc6dee..19100f722333 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CreateWorkspaceItemFromExternalServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CreateWorkspaceItemFromExternalServiceIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -199,6 +200,8 @@ public void creatingWorkspaceItemImportedFromScopusTest() throws Exception { + ".traditionalpageone['dc.identifier.scopus'][0].value", is(scopus2R.getValue()))) .andExpect(jsonPath("$._embedded.workflowitems[1].sections" + 
".traditionalpageone['dc.identifier.doi'][0].value", is(doi2R.getValue()))) + .andExpect(jsonPath("$._embedded.workflowitems[0].sections.license.url", + containsString("/api/core/bitstreams/"))) .andExpect(jsonPath("$.page.totalElements", is(2))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 9a0d39225c3d..a8417e84f809 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -146,13 +146,15 @@ private ArrayList getRecords() { + " Medical College of Prevention of Iodine Deficiency Diseases"); MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "L.V. Senyuk"); MetadatumDTO type = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.184"); - MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); + MetadatumDTO issn = createMetadatumDTO("dc", "relation", "issn", "2415-3060"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums.add(title); metadatums.add(author); @@ -163,6 +165,7 @@ private ArrayList getRecords() { metadatums.add(issn); metadatums.add(volume); metadatums.add(issue); + 
metadatums.add(publisher); ImportRecord firstrRecord = new ImportRecord(metadatums); @@ -172,13 +175,15 @@ private ArrayList getRecords() { "Ischemic Heart Disease and Role of Nurse of Cardiology Department"); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "K. І. Kozak"); MetadatumDTO type2 = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof2 = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.105"); - MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); + MetadatumDTO issn2 = createMetadatumDTO("dc", "relation", "issn", "2415-3060"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums2.add(title2); metadatums2.add(author2); @@ -189,6 +194,7 @@ private ArrayList getRecords() { metadatums2.add(issn2); metadatums2.add(volume2); metadatums2.add(issue2); + metadatums2.add(publisher2); ImportRecord secondRecord = new ImportRecord(metadatums2); records.add(firstrRecord); @@ -196,4 +202,4 @@ private ArrayList getRecords() { return records; } -} \ No newline at end of file +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java index 5f27cf57b05c..9902bfa24797 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java @@ -243,13 +243,11 @@ public void discoverSearchObjectsTestWithScope() throws Exception { .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), -// FacetEntryMatcher.typeFacet(false), FacetEntryMatcher.authorFacet(false), FacetEntryMatcher.anyFacet("editor", "text"), FacetEntryMatcher.anyFacet("organization", "text"), FacetEntryMatcher.anyFacet("funding", "text"), FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), -// FacetEntryMatcher.anyFacet("subject", "hierarchical"), FacetEntryMatcher.subjectFacet(false), FacetEntryMatcher.dateIssuedFacet(false), FacetEntryMatcher.hasContentInOriginalBundleFacet(false) @@ -283,17 +281,16 @@ public void discoverSearchObjectsTestWithScope() throws Exception { //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), -// FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.anyFacet("editor", "text"), - FacetEntryMatcher.anyFacet("organization", "text"), - FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.anyFacet("editor", "text"), + 
FacetEntryMatcher.anyFacet("organization", "text"), + FacetEntryMatcher.anyFacet("funding", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) ))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) @@ -326,17 +323,16 @@ public void discoverSearchObjectsTestWithScope() throws Exception { //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), -// FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.anyFacet("editor", "text"), - FacetEntryMatcher.anyFacet("organization", "text"), - FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.anyFacet("editor", "text"), + FacetEntryMatcher.anyFacet("organization", "text"), + FacetEntryMatcher.anyFacet("funding", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) ))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", 
containsString("/api/discover/search/objects"))) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index 64c1907a5a46..6504c3e2c3c3 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -28,6 +28,7 @@ import java.io.InputStream; import java.util.ArrayList; +import java.util.List; import java.util.Optional; import java.util.UUID; @@ -90,6 +91,7 @@ import org.dspace.utils.DSpace; import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; +import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.Ignore; import org.junit.Test; @@ -112,6 +114,24 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest @Autowired ChoiceAuthorityService choiceAuthorityService; + /** + * This field has been created to easily modify the tests when updating the defaultConfiguration's sidebar facets + */ + List> customSidebarFacets = List.of( + ); + + /** + * This field has been created to easily modify the tests when updating the defaultConfiguration's search filters + */ + List> customSearchFilters = List.of( + ); + + /** + * This field has been created to easily modify the tests when updating the defaultConfiguration's sort fields + */ + List> customSortFields = List.of( + ); + @Test public void rootDiscoverTest() throws Exception { @@ -132,6 +152,18 @@ public void rootDiscoverTest() throws Exception { @Test public void discoverFacetsTestWithoutParameters() throws Exception { + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + 
FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); //When we call this facets endpoint getClient().perform(get("/api/discover/facets")) @@ -143,17 +175,7 @@ public void discoverFacetsTestWithoutParameters() throws Exception { //There needs to be a self link to this endpoint .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) //We have 4 facets in the default configuration, they need to all be present in the embedded section - .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false))) - ); + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(allExpectedSidebarFacets))); } @Test @@ -305,7 +327,7 @@ public void discoverFacetsAuthorWithAuthorityWithSizeParameter() throws Exceptio .andExpect(status().isOk()) //The type needs to be 'discover' .andExpect(jsonPath("$.type", is("discover"))) - //The name of the facet needs to be seubject, because that's what we called + //The name of the facet needs to be author, because that's what we called .andExpect(jsonPath("$.name", is("author"))) //Because we've constructed such a structure so that we have more than 2 (size) subjects, there // needs to be a next link @@ -1095,6 +1117,44 @@ public void discoverFacetsDateTestWithSearchFilter() throws Exception { @Test public void discoverSearchTest() throws 
Exception { + List> allExpectedSearchFilters = new ArrayList<>(customSearchFilters); + allExpectedSearchFilters.addAll(List.of( + SearchFilterMatcher.barDateIssuedYearFilter(), + SearchFilterMatcher.pieItemtypeFilter(), + SearchFilterMatcher.typeFilter(), + SearchFilterMatcher.titleFilter(), + SearchFilterMatcher.authorFilter(), + SearchFilterMatcher.subjectFilter(), + SearchFilterMatcher.dateIssuedFilter(), + SearchFilterMatcher.hasContentInOriginalBundleFilter(), + SearchFilterMatcher.hasFileNameInOriginalBundleFilter(), + SearchFilterMatcher.hasFileDescriptionInOriginalBundleFilter(), + SearchFilterMatcher.entityTypeFilter(), + SearchFilterMatcher.isAuthorOfPublicationRelation(), + SearchFilterMatcher.isProjectOfPublicationRelation(), + SearchFilterMatcher.isOrgUnitOfPublicationRelation(), + SearchFilterMatcher.isPublicationOfJournalIssueRelation(), + SearchFilterMatcher.isJournalOfPublicationRelation(), + SearchFilterMatcher.languageFilter() + )); + + List> allExpectedSortFields = new ArrayList<>(customSortFields); + allExpectedSortFields.addAll(List.of( + SortOptionMatcher.sortOptionMatcher( + "score", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.title", DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.title", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.date.issued", DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.date.issued", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.date.accessioned", DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.date.accessioned", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()) + )); //When calling this root endpoint getClient().perform(get("/api/discover/search")) @@ -1109,42 +1169,9 @@ public void 
discoverSearchTest() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/search"))) //There needs to be a section where these filters as specified as they're the default filters // given in the configuration - .andExpect(jsonPath("$.filters", containsInAnyOrder( - SearchFilterMatcher.barDateIssuedYearFilter(), - SearchFilterMatcher.pieItemtypeFilter(), - SearchFilterMatcher.typeFilter(), - SearchFilterMatcher.titleFilter(), - SearchFilterMatcher.authorFilter(), - SearchFilterMatcher.subjectFilter(), - SearchFilterMatcher.dateIssuedFilter(), - SearchFilterMatcher.hasContentInOriginalBundleFilter(), - SearchFilterMatcher.hasFileNameInOriginalBundleFilter(), - SearchFilterMatcher.hasFileDescriptionInOriginalBundleFilter(), - SearchFilterMatcher.entityTypeFilter(), - SearchFilterMatcher.isAuthorOfPublicationRelation(), - SearchFilterMatcher.isProjectOfPublicationRelation(), - SearchFilterMatcher.isOrgUnitOfPublicationRelation(), - SearchFilterMatcher.isPublicationOfJournalIssueRelation(), - SearchFilterMatcher.isJournalOfPublicationRelation(), - SearchFilterMatcher.languageFilter() - ))) + .andExpect(jsonPath("$.filters", containsInAnyOrder(allExpectedSearchFilters))) //These sortOptions need to be present as it's the default in the configuration - .andExpect(jsonPath("$.sortOptions", contains( - SortOptionMatcher.sortOptionMatcher( - "score", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.title", DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.title", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.date.issued", DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.date.issued", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.date.accessioned", 
DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.date.accessioned", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()) - ))); + .andExpect(jsonPath("$.sortOptions", contains(allExpectedSortFields))); } @Test @@ -1248,6 +1275,18 @@ public void discoverSearchObjectsTest() throws Exception { //** WHEN ** //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** //The status has to be 200 OK @@ -1269,17 +1308,7 @@ public void discoverSearchObjectsTest() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a 
self link .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1384,6 +1413,18 @@ public void discoverSearchObjectsTestHasMoreAuthorFacet() throws Exception { //** WHEN ** //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(true), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** //The status has to be 200 OK @@ -1407,17 +1448,7 @@ public void discoverSearchObjectsTestHasMoreAuthorFacet() throws Exception { // property because we don't exceed their default limit for a hasMore true (the default is 10) //We do however exceed the limit for the authors, so this property has to be true for the author // facet - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(true), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ 
-1473,7 +1504,19 @@ public void discoverSearchObjectsTestHasMoreSubjectFacet() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** @@ -1496,17 +1539,7 @@ public void discoverSearchObjectsTestHasMoreSubjectFacet() throws Exception { // property because we don't exceed their default limit for a hasMore true (the default is 10) //We do however exceed the limit for the subject, so this property has to be true for the subject // facet - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1553,8 +1586,20 @@ 
public void discoverSearchObjectsTestWithBasicQuery() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query that says that the title has to contain 'test' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,contains")) @@ -1574,23 +1619,13 @@ public void discoverSearchObjectsTestWithBasicQuery() throws Exception { SearchResultMatcher.match("core", "item", "items"), SearchResultMatcher.match("core", "item", "items") ))) - //We need to display the appliedFilters object that contains the query that we've ran + //We need to display the appliedFilters object that contains the query that we've run .andExpect(jsonPath("$.appliedFilters", contains( AppliedFilterMatcher.appliedFilterEntry("title", "contains", "test", "test") ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - 
FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1663,8 +1698,20 @@ public void discoverSearchObjectsTestWithScope() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a scope 'test' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("scope", "test")) @@ -1687,17 +1734,7 @@ public void discoverSearchObjectsTestWithScope() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - 
FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1746,9 +1783,21 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { context.restoreAuthSystemState(); // ** WHEN ** - // An anonymous user browses this endpoint to find the the objects in the system + // An anonymous user browses this endpoint to find the objects in the system // With dsoType 'item' + List> allExpectedSidebarFacetsWithDsoTypeItem = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypeItem.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("dsoType", "Item")) @@ -1771,21 +1820,24 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - 
FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypeItem))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); // With dsoTypes 'community' and 'collection' + List> allExpectedSidebarFacetsWithDsoTypesComCol = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypesComCol.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("configuration", "backend") .param("dsoType", "Community") @@ -1811,21 +1863,25 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - 
FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypesComCol))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); // With dsoTypes 'collection' and 'item' + List> allExpectedSidebarFacetsWithDsoTypesColItem = + new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypesColItem.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("configuration", "backend") .param("dsoType", "Collection") @@ -1852,21 +1908,25 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypesColItem))) //There always needs to 
be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); // With dsoTypes 'community', 'collection' and 'item' + List> allExpectedSidebarFacetsWithDsoTypesComColItem = + new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypesComColItem.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("configuration", "backend") .param("dsoType", "Community") @@ -1897,17 +1957,8 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypesComColItem))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); } @@ -1954,9 +2005,21 @@ public void 
discoverSearchObjectsTestWithDsoTypeAndSort() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a dsoType 'item' //And a sort on the dc.title ascending + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("dsoType", "Item") .param("sort", "dc.title,ASC")) @@ -1988,17 +2051,7 @@ public void discoverSearchObjectsTestWithDsoTypeAndSort() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //We want to get the sort that's been used as well in the response .andExpect(jsonPath("$.sort", is( 
SortOptionMatcher.sortByAndOrder("dc.title", "ASC") @@ -2180,8 +2233,20 @@ public void discoverSearchObjectsTestForPaginationAndNextLinks() throws Exceptio context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(true), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("size", "1") .param("page", "1")) @@ -2205,17 +2270,7 @@ public void discoverSearchObjectsTestForPaginationAndNextLinks() throws Exceptio .andExpect(jsonPath("$._embedded.searchResult._embedded.objects", Matchers.containsInAnyOrder( SearchResultMatcher.match() ))) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(true), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) 
; @@ -2278,8 +2333,20 @@ public void discoverSearchObjectsTestWithContentInABitstream() throws Exception context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query stating 'ThisIsSomeDummyText' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("query", "ThisIsSomeDummyText")) @@ -2299,17 +2366,7 @@ public void discoverSearchObjectsTestWithContentInABitstream() throws Exception //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", 
containsString("/api/discover/search/objects"))) ; @@ -2362,8 +2419,19 @@ public void discoverSearchObjectsTestForEmbargoedItemsAndPrivateItems() throws E //Turn on the authorization again context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system - // + //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** //The status has to be 200 OK @@ -2389,17 +2457,7 @@ public void discoverSearchObjectsTestForEmbargoedItemsAndPrivateItems() throws E ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link 
available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2457,7 +2515,7 @@ public void discoverSearchObjectsTestWithContentInAPrivateBitstream() throws Exc context.restoreAuthSystemState(); context.setCurrentUser(null); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 getClient().perform(get("/api/discover/search/objects") .param("query", "ThisIsSomeDummyText")) @@ -2534,8 +2592,20 @@ public void discoverSearchObjectsTestForScope() throws Exception { UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the scope given + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("scope", String.valueOf(scope))) //** THEN ** @@ -2556,17 +2626,7 @@ public void discoverSearchObjectsTestForScope() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", 
"chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2617,8 +2677,20 @@ public void discoverSearchObjectsTestForScopeWithPrivateItem() throws Exception UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("scope", String.valueOf(scope))) //** THEN ** @@ -2645,17 +2717,7 @@ public void discoverSearchObjectsTestForScopeWithPrivateItem() throws Exception )))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - 
FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2807,8 +2869,20 @@ public void discoverSearchObjectsTestForHitHighlights() throws Exception { String query = "Public"; //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query stating 'public' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("query", query)) //** THEN ** @@ -2829,17 +2903,7 @@ public void discoverSearchObjectsTestForHitHighlights() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", 
"chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2889,7 +2953,7 @@ public void discoverSearchObjectsTestForHitHighlightsWithPrivateItem() throws Ex String query = "Public"; //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query stating 'Public' getClient().perform(get("/api/discover/search/objects") .param("query", query)) @@ -2955,10 +3019,21 @@ public void discoverSearchObjectsWithQueryOperatorContains_query() throws Except context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); 
getClient().perform(get("/api/discover/search/objects") .param("f.title", "test*,query")) //** THEN ** @@ -2977,17 +3052,7 @@ public void discoverSearchObjectsWithQueryOperatorContains_query() throws Except ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3035,10 +3100,21 @@ public void discoverSearchObjectsWithQueryOperatorContains() throws Exception { context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + 
FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,contains")) //** THEN ** @@ -3057,18 +3133,9 @@ public void discoverSearchObjectsWithQueryOperatorContains() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) - //There always needs to be a self link available + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) + + //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3116,8 +3183,20 @@ public void discoverSearchObjectsWithQueryOperatorNotContains_query() throws Exc UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + 
FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "-test*,query")) //** THEN ** @@ -3135,17 +3214,7 @@ public void discoverSearchObjectsWithQueryOperatorNotContains_query() throws Exc ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3193,10 +3262,21 @@ public void discoverSearchObjectsWithQueryOperatorNotContains() throws Exception context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + 
FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,notcontains")) //** THEN ** @@ -3214,17 +3294,8 @@ public void discoverSearchObjectsWithQueryOperatorNotContains() throws Exception ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) + //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3279,8 +3350,20 @@ public void discoverSearchObjectsTestForMinMaxValues() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + 
FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("size", "2") .param("page", "1")) @@ -3303,17 +3386,7 @@ public void discoverSearchObjectsTestForMinMaxValues() throws Exception { .andExpect(jsonPath("$._embedded.searchResult._embedded.objects", Matchers.containsInAnyOrder( SearchResultMatcher.match() ))) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3368,25 +3441,27 @@ public void discoverSearchFacetsTestForMinMaxValues() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + 
allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/facets")) //** THEN ** //The status has to be 200 OK .andExpect(status().isOk()) //The type has to be 'discover' .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/facets"))) ; @@ -3433,10 +3508,21 @@ public void discoverSearchObjectsWithQueryOperatorEquals_query() throws Exceptio context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new 
ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "Test,query")) //** THEN ** @@ -3454,17 +3540,7 @@ public void discoverSearchObjectsWithQueryOperatorEquals_query() throws Exceptio ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3512,10 +3588,21 @@ public void discoverSearchObjectsWithQueryOperatorEquals() throws Exception { context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system 
//With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "Test,equals")) //** THEN ** @@ -3533,17 +3620,7 @@ public void discoverSearchObjectsWithQueryOperatorEquals() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3590,10 +3667,21 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals_query() throws Excep context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An 
anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "-Test,query")) //** THEN ** @@ -3612,17 +3700,7 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals_query() throws Excep ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3670,10 +3748,21 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals() throws Exception { context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous 
user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "Test,notequals")) //** THEN ** @@ -3692,17 +3781,7 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3749,10 +3828,21 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority_query() throws Ex 
context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "-id:test,query")) //** THEN ** @@ -3770,17 +3860,7 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority_query() throws Ex ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3828,10 +3908,21 @@ public 
void discoverSearchObjectsWithQueryOperatorNotAuthority() throws Exceptio context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.languageFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,notauthority")) //** THEN ** @@ -3849,17 +3940,7 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority() throws Exceptio ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", 
containsString("/api/discover/search/objects"))) ; @@ -3869,7 +3950,7 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority() throws Exceptio @Test public void discoverSearchObjectsWithMissingQueryOperator() throws Exception { //** WHEN ** - // An anonymous user browses this endpoint to find the the objects in the system + // An anonymous user browses this endpoint to find the objects in the system // With the given search filter where there is the filter operator missing in the value (must be of form // <:filter-value>,<:filter-operator>) getClient().perform(get("/api/discover/search/objects") @@ -3882,10 +3963,10 @@ public void discoverSearchObjectsWithMissingQueryOperator() throws Exception { @Test public void discoverSearchObjectsWithNotValidQueryOperator() throws Exception { //** WHEN ** - // An anonymous user browses this endpoint to find the the objects in the system + // An anonymous user browses this endpoint to find the objects in the system // With the given search filter where there is a non-valid filter operator given (must be of form // <:filter-value>,<:filter-operator> where the filter operator is one of: “contains”, “notcontains”, "equals" - // “notequals”, “authority”, “notauthority”, "query”); see enum RestSearchOperator + // “notequals”, “authority”, “notauthority”, "query"); see enum RestSearchOperator getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,operator")) //** THEN ** @@ -4183,8 +4264,8 @@ public void discoverSearchObjectsTestWithUnEscapedLuceneCharactersTest() throws @Test /** - * This test is intent to verify that inprogress submission (workspaceitem, workflowitem, pool task and claimed - * tasks) don't interfers with the standard search + * This test is intended to verify that an in progress submission (workspaceitem, workflowitem, pool task and + * claimed tasks) don't interfere with the standard search * * @throws Exception */ @@ -4234,7 +4315,7 @@ public void 
discoverSearchObjectsWithInProgressSubmissionTest() throws Exception .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) context.setCurrentUser(eperson); WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1).withTitle("Workspace Item 1") .build(); @@ -4249,7 +4330,7 @@ public void discoverSearchObjectsWithInProgressSubmissionTest() throws Exception ClaimedTask cTask = ClaimedTaskBuilder.createClaimedTask(context, col2, admin).withTitle("Claimed Item") .build(); - // 5. other inprogress submissions made by the administrator + // 5. other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withTitle("Admin Workspace Item 1").build(); @@ -4264,7 +4345,19 @@ public void discoverSearchObjectsWithInProgressSubmissionTest() throws Exception //** WHEN ** // An anonymous user, the submitter and the admin that browse this endpoint to find the public objects in the - // system should not retrieve the inprogress submissions and related objects + // system should not retrieve the in progress submissions and related objects + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.typeFacet(false), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.languageFacet(false) + )); String[] tokens = new String[] { null, getAuthToken(eperson.getEmail(), password), @@ -4293,17 
+4386,7 @@ public void discoverSearchObjectsWithInProgressSubmissionTest() throws Exception ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), - FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.languageFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -4366,7 +4449,7 @@ public void discoverSearchObjectsWorkspaceConfigurationTest() throws Exception { .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from our submitter user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from our submitter user (2 ws, 1 wf that will produce also a pooltask) WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1).withTitle("Workspace Item 1") .withIssueDate("2010-07-23") .build(); @@ -4384,7 +4467,7 @@ public void discoverSearchObjectsWorkspaceConfigurationTest() throws Exception { .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. 
other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") @@ -4568,7 +4651,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) context.setCurrentUser(eperson); WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1).withTitle("Workspace Item 1") .withIssueDate("2010-07-23") @@ -4587,7 +4670,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") @@ -4601,7 +4684,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { .withIssueDate("2010-11-03") .withTitle("Admin Workflow Item 1").build(); - // 6. a pool taks in the second step of the workflow + // 6. 
a pool task in the second step of the workflow ClaimedTask cTask2 = ClaimedTaskBuilder.createClaimedTask(context, col2, admin).withTitle("Pool Step2 Item") .withIssueDate("2010-11-04") .build(); @@ -4628,7 +4711,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { // 1 pool task in step 1, submitted by the same regular submitter // 1 pool task in step 1, submitted by the admin // 1 claimed task in the first workflow step from the repository admin - // 1 pool task task in step 2, from the repository admin + // 1 pool task in step 2, from the repository admin // (This one is created by creating a claimed task for step 1 and approving it) //** WHEN ** @@ -4857,7 +4940,7 @@ public void discoverSearchObjectsWorkflowAdminConfigurationTest() throws Excepti .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") @@ -4871,7 +4954,7 @@ public void discoverSearchObjectsWorkflowAdminConfigurationTest() throws Excepti .withIssueDate("2010-11-03") .withTitle("Admin Workflow Item 1").build(); - // 6. a pool taks in the second step of the workflow + // 6. 
a pool task in the second step of the workflow ClaimedTask cTask2 = ClaimedTaskBuilder.createClaimedTask(context, col2, admin).withTitle("Pool Step2 Item") .withIssueDate("2010-11-04") .build(); @@ -4898,7 +4981,7 @@ public void discoverSearchObjectsWorkflowAdminConfigurationTest() throws Excepti // 1 pool task in step 1, submitted by the same regular submitter // 1 pool task in step 1, submitted by the admin // 1 claimed task in the first workflow step from the repository admin - // 1 pool task task in step 2, from the repository admin + // 1 pool task in step 2, from the repository admin // (This one is created by creating a claimed task for step 1 and approving it) //** WHEN ** @@ -6615,7 +6698,7 @@ public void discoverSearchObjectsSupervisionConfigurationTest() throws Exception .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) context.setCurrentUser(eperson); WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withTitle("Workspace Item 1") @@ -6645,7 +6728,7 @@ public void discoverSearchObjectsSupervisionConfigurationTest() throws Exception .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. 
other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java index 08118f6e7fcf..60f2c183bc45 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java @@ -20,6 +20,7 @@ import org.dspace.app.rest.matcher.PageMatcher; import org.dspace.app.rest.matcher.SearchResultMatcher; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.builder.ItemBuilder; @@ -54,7 +55,7 @@ public class DiscoveryRestControllerMultiLanguageIT extends AbstractControllerIn private ChoiceAuthorityService choiceAuthorityService; @After - public void after() { + public void after() throws SubmissionConfigReaderException { DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); choiceAuthorityService.clearCache(); @@ -280,247 +281,258 @@ public void discoverFacetsLanguageWithPrefixTest() throws Exception { public void discoverFacetsTypesTest() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = 
CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "srsc:SCB14") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "matem")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("MATEMATICA","srsc:SCB14")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "мат")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("МАТЕМАТИКА","srsc:SCB14")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = 
CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "srsc:SCB14") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "matem")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "мат")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithoutAuthority() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - 
.withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + 
.withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithUnknownAuthority() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - 
.header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN") + .build(); + + context.restoreAuthSystemState(); + + 
getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithUnknownAuthorityName() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", 
is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", 
"research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithWrongAuthorityFormat() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en", "uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "authority") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", 
containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "authority")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "authority")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "authority") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", 
containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } -} +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java new file mode 100644 index 000000000000..18f949fe9e41 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -0,0 +1,689 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.FacetEntryMatcher; +import org.dspace.app.rest.matcher.FacetValueMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import 
org.dspace.builder.MetadataFieldBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.CollectionService; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * This class tests the correct inheritance of Discovery configurations for sub communities and collections. + * To thoroughly test this, a community and collection structure is set up to where different communities have custom + * configurations configured for them. + * + * The following structure is uses: + * - Parent Community 1 - Custom configuration: discovery-parent-community-1 + * -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1 + * -- Collection 111 - Custom configuration: discovery-collection-1-1-1 + * -- Collection 112 + * -- Subcommunity 12 + * -- Collection 121 - Custom configuration: discovery-collection-1-2-1 + * -- Collection 122 + * - Parent Community 2 + * -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1 + * -- Collection 211 - Custom configuration: discovery-collection-2-1-1 + * -- Collection 212 + * -- Subcommunity 22 + * -- Collection 221 - Custom configuration: discovery-collection-2-2-1 + * -- Collection 222 + * + * Each custom configuration contains a unique index for a unique metadata field, to verify if correct information is + * indexed and provided for the different search scopes. + * + * Each collection has an item in it. Next to these items, there are two mapped items, one in collection 111 and 222, + * and one in collection 122 and 211. + * + * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to + * power these facets are indexed properly. 
+ * + * This file requires the discovery configuration in the following test file: + * src/test/data/dspaceFolder/config/spring/api/test-discovery.xml + */ +public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + CollectionService collectionService; + + private Community parentCommunity1; + private Community subcommunity11; + private Community subcommunity12; + private Collection collection111; + private Collection collection112; + private Collection collection121; + private Collection collection122; + + private Community parentCommunity2; + private Community subcommunity21; + private Community subcommunity22; + private Collection collection211; + private Collection collection212; + private Collection collection221; + private Collection collection222; + + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); + + parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); + subcommunity11 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1") + .build(); + subcommunity12 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2") + .build(); + 
collection111 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") + .build(); + collection112 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2") + .build(); + collection121 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1") + .build(); + + collection122 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") + .build(); + + parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); + + + subcommunity21 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1") + .build(); + subcommunity22 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2") + .build(); + collection211 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") + .build(); + collection212 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2") + .build(); + collection221 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1") + .build(); + collection222 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2") + .build(); + + + Item item111 = ItemBuilder.createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-item111") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111") + .withMetadata("dc", "test", "collection111field", "collection111field-item111") + .withMetadata("dc", "test", "collection121field", "collection121field-item111") + .withMetadata("dc", "test", "subcommunity21field", 
"subcommunity21field-item111") + .withMetadata("dc", "test", "collection211field", "collection211field-item111") + .withMetadata("dc", "test", "collection221field", "collection221field-item111") + .build(); + + Item item112 = ItemBuilder.createItem(context, collection112) + .withMetadata("dc", "contributor", "author", "author-item112") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112") + .withMetadata("dc", "test", "collection111field", "collection111field-item112") + .withMetadata("dc", "test", "collection121field", "collection121field-item112") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112") + .withMetadata("dc", "test", "collection211field", "collection211field-item112") + .withMetadata("dc", "test", "collection221field", "collection221field-item112") + .build(); + + Item item121 = ItemBuilder.createItem(context, collection121) + .withMetadata("dc", "contributor", "author", "author-item121") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121") + .withMetadata("dc", "test", "collection111field", "collection111field-item121") + .withMetadata("dc", "test", "collection121field", "collection121field-item121") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121") + .withMetadata("dc", "test", "collection211field", "collection211field-item121") + .withMetadata("dc", "test", "collection221field", "collection221field-item121") + .build(); + + Item item122 = ItemBuilder.createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-item122") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122") + .withMetadata("dc", "test", 
"collection111field", "collection111field-item122") + .withMetadata("dc", "test", "collection121field", "collection121field-item122") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122") + .withMetadata("dc", "test", "collection211field", "collection211field-item122") + .withMetadata("dc", "test", "collection221field", "collection221field-item122") + .build(); + + Item item211 = ItemBuilder.createItem(context, collection211) + .withMetadata("dc", "contributor", "author", "author-item211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211") + .withMetadata("dc", "test", "collection111field", "collection111field-item211") + .withMetadata("dc", "test", "collection121field", "collection121field-item211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211") + .withMetadata("dc", "test", "collection211field", "collection211field-item211") + .withMetadata("dc", "test", "collection221field", "collection221field-item211") + .build(); + + Item item212 = ItemBuilder.createItem(context, collection212) + .withMetadata("dc", "contributor", "author", "author-item212") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212") + .withMetadata("dc", "test", "collection111field", "collection111field-item212") + .withMetadata("dc", "test", "collection121field", "collection121field-item212") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212") + .withMetadata("dc", "test", "collection211field", "collection211field-item212") + .withMetadata("dc", "test", "collection221field", "collection221field-item212") + .build(); + + Item item221 = ItemBuilder.createItem(context, collection221) + .withMetadata("dc", "contributor", "author", "author-item221") + 
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221") + .withMetadata("dc", "test", "collection111field", "collection111field-item221") + .withMetadata("dc", "test", "collection121field", "collection121field-item221") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221") + .withMetadata("dc", "test", "collection211field", "collection211field-item221") + .withMetadata("dc", "test", "collection221field", "collection221field-item221") + .build(); + + Item item222 = ItemBuilder.createItem(context, collection222) + .withMetadata("dc", "contributor", "author", "author-item222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222") + .withMetadata("dc", "test", "collection111field", "collection111field-item222") + .withMetadata("dc", "test", "collection121field", "collection121field-item222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222") + .withMetadata("dc", "test", "collection211field", "collection211field-item222") + .withMetadata("dc", "test", "collection221field", "collection221field-item222") + .build(); + + Item mappedItem111222 = ItemBuilder + .createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-mappedItem111222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem111222") + .withMetadata("dc", "test", "collection211field", 
"collection211field-mappedItem111222") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222") + .build(); + + + Item mappedItem122211 = ItemBuilder + .createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-mappedItem122211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211") + .build(); + + + collectionService.addItem(context, collection222, mappedItem111222); + collectionService.addItem(context, collection211, mappedItem122211); + + + context.dispatchEvents(); + context.restoreAuthSystemState(); + } + + @Test + /** + * Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1. 
+ */ + public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(parentCommunity1.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item111", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item112", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem111222", + 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + + + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(subcommunity11.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item111", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection111field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection111field") + .param("scope", String.valueOf(collection111.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection111field", + "collection111field-item111", 1), + FacetValueMatcher.matchEntry("collection111field", + "collection111field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited + * correctly for Collection 112. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(collection112.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Subcommunity 12. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(subcommunity12.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection121field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection121field") + .param("scope", String.valueOf(collection121.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection121field", + "collection121field-item121", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Collection 122. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(collection122.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Parent Community 2. 
+ */ + public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.anyFacet("language", "text") + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(subcommunity21.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item211", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection211field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection211field") + .param("scope", String.valueOf(collection211.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection211field", + "collection211field-item211", 1), + FacetValueMatcher.matchEntry("collection211field", + "collection211field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited + * correctly for Collection 212. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(collection212.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Subcommunity 22. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.anyFacet("language", "text") + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection221field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection221field") + .param("scope", String.valueOf(collection221.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection221field", + "collection221field-item221", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Collection 222. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.anyFacet("language", "text") + )) + ); + } + + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRegistrationRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRegistrationRestControllerIT.java new file mode 100644 index 000000000000..cfff06d501f7 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRegistrationRestControllerIT.java @@ -0,0 +1,343 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doNothing; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import 
org.dspace.app.rest.matcher.MetadataMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.EPersonBuilder; +import org.dspace.content.MetadataField; +import org.dspace.content.service.MetadataFieldService; +import org.dspace.core.Email; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; +import org.dspace.eperson.dto.RegistrationDataChanges; +import org.dspace.eperson.dto.RegistrationDataPatch; +import org.dspace.eperson.service.AccountService; +import org.dspace.eperson.service.RegistrationDataService; +import org.hamcrest.Matchers; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.MockedStatic; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public class EPersonRegistrationRestControllerIT extends AbstractControllerIntegrationTest { + + private static MockedStatic emailMockedStatic; + + @Autowired + private AccountService accountService; + @Autowired + private RegistrationDataService registrationDataService; + @Autowired + private MetadataFieldService metadataFieldService; + + private RegistrationData orcidRegistration; + private MetadataField orcidMf; + private MetadataField firstNameMf; + private MetadataField lastNameMf; + private EPerson customEPerson; + private String customPassword; + + + @BeforeClass + public static void init() throws Exception { + emailMockedStatic = Mockito.mockStatic(Email.class); + } + + @AfterClass + public static void tearDownClass() throws Exception { + emailMockedStatic.close(); + } + + @Before + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + + orcidRegistration = + registrationDataService.create(context, "0000-0000-0000-0000", 
RegistrationTypeEnum.ORCID); + + orcidMf = + metadataFieldService.findByElement(context, "eperson", "orcid", null); + firstNameMf = + metadataFieldService.findByElement(context, "eperson", "firstname", null); + lastNameMf = + metadataFieldService.findByElement(context, "eperson", "lastname", null); + + registrationDataService.addMetadata( + context, orcidRegistration, orcidMf, "0000-0000-0000-0000" + ); + registrationDataService.addMetadata( + context, orcidRegistration, firstNameMf, "Vincenzo" + ); + registrationDataService.addMetadata( + context, orcidRegistration, lastNameMf, "Mecca" + ); + + registrationDataService.update(context, orcidRegistration); + + customPassword = "vins-01"; + customEPerson = + EPersonBuilder.createEPerson(context) + .withEmail("vincenzo.mecca@4science.com") + .withNameInMetadata("Vins", "4Science") + .withPassword(customPassword) + .withCanLogin(true) + .build(); + + context.restoreAuthSystemState(); + } + + + @Test + public void givenOrcidToken_whenPostForMerge_thenUnauthorized() throws Exception { + + getClient().perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", orcidRegistration.getToken()) + .param("override", "eperson.firtname,eperson.lastname,eperson.orcid") + ).andExpect(status().isUnauthorized()); + + } + + @Test + public void givenExpiredToken_whenPostForMerge_thenUnauthorized() throws Exception { + + context.turnOffAuthorisationSystem(); + registrationDataService.markAsExpired(context, orcidRegistration); + context.restoreAuthSystemState(); + + getClient().perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", orcidRegistration.getToken()) + .param("override", "eperson.firtname,eperson.lastname,eperson.orcid") + ).andExpect(status().isUnauthorized()); + + } + + @Test + public void givenExpiredToken_whenPostAuthForMerge_thenForbidden() throws Exception { + + context.turnOffAuthorisationSystem(); + registrationDataService.markAsExpired(context, orcidRegistration); + 
context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", orcidRegistration.getToken()) + .param("override", "eperson.firtname,eperson.lastname,eperson.orcid") + ).andExpect(status().isForbidden()); + + } + + @Test + public void givenValidationRegistration_whenPostAuthDiffersFromIdPathParam_thenForbidden() throws Exception { + + context.turnOffAuthorisationSystem(); + RegistrationData validationRegistration = + registrationDataService.create(context, "0000-0000-0000-0000", RegistrationTypeEnum.VALIDATION_ORCID); + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", validationRegistration.getToken()) + ).andExpect(status().isForbidden()); + + } + + @Test + public void givenValidationRegistration_whenPostWithoutOverride_thenCreated() throws Exception { + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + context.turnOffAuthorisationSystem(); + RegistrationDataChanges changes = + new RegistrationDataChanges("vincenzo.mecca@4science.com", RegistrationTypeEnum.VALIDATION_ORCID); + RegistrationData validationRegistration = + this.accountService.renewRegistrationForEmail( + context, new RegistrationDataPatch(orcidRegistration, changes) + ); + context.restoreAuthSystemState(); + + String customToken = getAuthToken(customEPerson.getEmail(), customPassword); + + getClient(customToken).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", validationRegistration.getToken()) + ).andExpect(status().isCreated()); + + } + + @Test + public void givenValidationRegistration_whenPostWithOverride_thenCreated() throws Exception { + + Email spy = 
Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + context.turnOffAuthorisationSystem(); + RegistrationDataChanges changes = + new RegistrationDataChanges("vincenzo.mecca@4science.com", RegistrationTypeEnum.VALIDATION_ORCID); + RegistrationData validationRegistration = + this.accountService.renewRegistrationForEmail( + context, new RegistrationDataPatch(orcidRegistration, changes) + ); + context.restoreAuthSystemState(); + + String customToken = getAuthToken(customEPerson.getEmail(), customPassword); + + getClient(customToken).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", validationRegistration.getToken()) + .param("override", "eperson.firstname,eperson.lastname") + ).andExpect(status().isCreated()); + + } + + @Test + public void givenValidationRegistration_whenPostWithoutOverride_thenOnlyNewMetadataAdded() throws Exception { + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + context.turnOffAuthorisationSystem(); + RegistrationDataChanges changes = + new RegistrationDataChanges("vincenzo.mecca@4science.com", RegistrationTypeEnum.VALIDATION_ORCID); + RegistrationData validationRegistration = + this.accountService.renewRegistrationForEmail( + context, new RegistrationDataPatch(orcidRegistration, changes) + ); + context.restoreAuthSystemState(); + + String customToken = getAuthToken(customEPerson.getEmail(), customPassword); + + getClient(customToken).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", validationRegistration.getToken()) + ).andExpect(status().isCreated()) + .andExpect( + jsonPath("$.netid", equalTo("0000-0000-0000-0000")) + ) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("eperson.firstname", "Vins"), + MetadataMatcher.matchMetadata("eperson.lastname", 
"4Science"), + MetadataMatcher.matchMetadata("eperson.orcid", "0000-0000-0000-0000") + ) + ) + ); + + } + + @Test + public void givenValidationRegistration_whenPostWithOverride_thenMetadataReplaced() throws Exception { + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + context.turnOffAuthorisationSystem(); + RegistrationDataChanges changes = + new RegistrationDataChanges("vincenzo.mecca@4science.com", RegistrationTypeEnum.VALIDATION_ORCID); + RegistrationData validationRegistration = + this.accountService.renewRegistrationForEmail( + context, new RegistrationDataPatch(orcidRegistration, changes) + ); + context.restoreAuthSystemState(); + + String customToken = getAuthToken(customEPerson.getEmail(), customPassword); + + getClient(customToken).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", validationRegistration.getToken()) + .param("override", "eperson.firstname,eperson.lastname") + ).andExpect(status().isCreated()) + .andExpect( + jsonPath("$.netid", equalTo("0000-0000-0000-0000")) + ) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("eperson.firstname", "Vincenzo"), + MetadataMatcher.matchMetadata("eperson.lastname", "Mecca"), + MetadataMatcher.matchMetadata("eperson.orcid", "0000-0000-0000-0000") + ) + ) + ); + + } + + @Test + public void givenValidationRegistration_whenPostWithOverrideAndMetadataNotFound_thenBadRequest() throws Exception { + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + context.turnOffAuthorisationSystem(); + RegistrationDataChanges changes = + new RegistrationDataChanges("vincenzo.mecca@4science.com", RegistrationTypeEnum.VALIDATION_ORCID); + RegistrationData validationRegistration = + this.accountService.renewRegistrationForEmail( + context, new 
RegistrationDataPatch(orcidRegistration, changes) + ); + context.restoreAuthSystemState(); + + String customToken = getAuthToken(customEPerson.getEmail(), customPassword); + + getClient(customToken).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", validationRegistration.getToken()) + .param("override", "eperson.phone") + ).andExpect(status().isBadRequest()); + + context.turnOffAuthorisationSystem(); + MetadataField phoneMf = + metadataFieldService.findByElement(context, "eperson", "phone", null); + + registrationDataService.addMetadata( + context, validationRegistration, phoneMf, "1234567890" + ); + context.restoreAuthSystemState(); + + getClient(customToken).perform( + post("/api/eperson/epersons/" + customEPerson.getID()) + .param("token", validationRegistration.getToken()) + .param("override", "eperson.phone") + ).andExpect(status().isBadRequest()); + + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java index fab9dffa4616..be63f83ffbd8 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java @@ -36,6 +36,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.sql.SQLException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collections; @@ -66,6 +67,7 @@ import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; +import org.dspace.authorize.AuthorizeException; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import 
org.dspace.builder.EPersonBuilder; @@ -74,10 +76,14 @@ import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.service.MetadataFieldService; import org.dspace.core.I18nUtil; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.PasswordHash; +import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; import org.dspace.eperson.dao.RegistrationDataDAO; import org.dspace.eperson.service.AccountService; import org.dspace.eperson.service.EPersonService; @@ -112,6 +118,9 @@ public class EPersonRestRepositoryIT extends AbstractControllerIntegrationTest { @Autowired private ConfigurationService configurationService; + @Autowired + private MetadataFieldService metadataFieldService; + @Test public void createTest() throws Exception { // we should check how to get it from Spring @@ -816,6 +825,242 @@ public void findByMetadataMissingParameter() throws Exception { .andExpect(status().isBadRequest()); } + // Test of /epersons/search/isNotMemberOf pagination + // NOTE: Additional tests of 'isNotMemberOf' search functionality can be found in EPersonTest in 'dspace-api' + @Test + public void searchIsNotMemberOfPaginationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test Parent group") + .build(); + // Create two EPerson in main group. These SHOULD NOT be included in pagination + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test", "Person") + .withEmail("test@example.com") + .withGroupMembership(group) + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test2", "Person") + .withEmail("test2@example.com") + .withGroupMembership(group) + .build(); + + // Create five EPersons who are NOT members of that group. 
These SHOULD be included in pagination + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test3", "Person") + .withEmail("test3@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test4", "Person") + .withEmail("test4@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test5", "Person") + .withEmail("test5@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test6", "Person") + .withEmail("test6@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test7", "Person") + .withEmail("test7@example.com") + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "person") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "person") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", 
is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "person") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + + @Test + public void searchIsNotMemberOfByEmail() throws Exception { + context.turnOffAuthorisationSystem(); + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .build(); + Group group2 = GroupBuilder.createGroup(context) + .withName("Test another group") + .build(); + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withNameInMetadata("John", "Doe") + .withEmail("Johndoe@example.com") + .withGroupMembership(group) + .build(); + + EPerson ePerson2 = EPersonBuilder.createEPerson(context) + .withNameInMetadata("Jane", "Smith") + .withEmail("janesmith@example.com") + .build(); + + EPerson ePerson3 = EPersonBuilder.createEPerson(context) + .withNameInMetadata("Tom", "Doe") + .withEmail("tomdoe@example.com") + .build(); + + EPerson ePerson4 = EPersonBuilder.createEPerson(context) + .withNameInMetadata("Harry", "Prefix-Doe") + .withEmail("harrydoeprefix@example.com") + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(admin.getEmail(), password); + // Search for exact email in a group the person already belongs to. Should return zero results. 
+ getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getEmail()) + .param("group", group.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + + // Search for exact email in a group the person does NOT belong to. Should return the person + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getEmail()) + .param("group", group2.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.contains( + EPersonMatcher.matchEPersonEntry(ePerson) + ))) + .andExpect(jsonPath("$.page.totalElements", is(1))); + + // Search partial email should return all the people created above. + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", "example.com") + .param("group", group2.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.containsInAnyOrder( + EPersonMatcher.matchEPersonEntry(ePerson), + EPersonMatcher.matchEPersonEntry(ePerson2), + EPersonMatcher.matchEPersonEntry(ePerson3), + EPersonMatcher.matchEPersonEntry(ePerson4) + ))); + } + + @Test + public void searchIsNotMemberOfByUUID() throws Exception { + context.turnOffAuthorisationSystem(); + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .build(); + Group group2 = GroupBuilder.createGroup(context) + .withName("Test another group") + .build(); + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withNameInMetadata("John", "Doe") + .withEmail("Johndoe@example.com") + .withGroupMembership(group) + .build(); + context.restoreAuthSystemState(); + + String authToken = getAuthToken(admin.getEmail(), password); + // Search for UUID in a group the 
person already belongs to. Should return zero results. + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getID().toString()) + .param("group", group.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + + // Search for exact email in a group the person does NOT belong to. Should return the person + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getID().toString()) + .param("group", group2.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.contains( + EPersonMatcher.matchEPersonEntry(ePerson) + ))) + .andExpect(jsonPath("$.page.totalElements", is(1))); + } + + @Test + public void searchIsNotMemberOfUnauthorized() throws Exception { + Group adminGroup = groupService.findByName(context, Group.ADMIN); + getClient().perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void searchIsNotMemberOfForbidden() throws Exception { + Group adminGroup = groupService.findByName(context, Group.ADMIN); + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isForbidden()); + } + + @Test + public void searchIsNotMemberOfMissingOrInvalidParameter() throws Exception { + Group adminGroup = groupService.findByName(context, Group.ADMIN); + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf")) + 
.andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString())) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", adminGroup.getID().toString())) + .andExpect(status().isBadRequest()); + + // Test invalid group UUID + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString()) + .param("group", "not-a-uuid")) + .andExpect(status().isBadRequest()); + } + @Test public void deleteOne() throws Exception { context.turnOffAuthorisationSystem(); @@ -2988,6 +3233,138 @@ public void postEPersonWithTokenWithEmailPropertyAnonUser() throws Exception { } } + + @Test + public void postEpersonFromOrcidRegistrationToken() throws Exception { + + context.turnOffAuthorisationSystem(); + + String registrationEmail = "vincenzo.mecca@4science.com"; + RegistrationData orcidRegistration = + createRegistrationData(RegistrationTypeEnum.ORCID, registrationEmail); + + context.restoreAuthSystemState(); + + ObjectMapper mapper = new ObjectMapper(); + EPersonRest ePersonRest = new EPersonRest(); + MetadataRest metadataRest = new MetadataRest(); + ePersonRest.setEmail(registrationEmail); + ePersonRest.setCanLogIn(true); + ePersonRest.setNetid(orcidRegistration.getNetId()); + MetadataValueRest surname = new MetadataValueRest(); + surname.setValue("Doe"); + metadataRest.put("eperson.lastname", surname); + MetadataValueRest firstname = new MetadataValueRest(); + firstname.setValue("John"); + metadataRest.put("eperson.firstname", firstname); + ePersonRest.setMetadata(metadataRest); + + AtomicReference idRef = new AtomicReference(); + + try { + getClient().perform(post("/api/eperson/epersons") + .param("token", orcidRegistration.getToken()) + .content(mapper.writeValueAsBytes(ePersonRest)) + .contentType(MediaType.APPLICATION_JSON)) + 
.andExpect(status().isCreated()) + .andDo(result -> idRef + .set(UUID.fromString(read(result.getResponse().getContentAsString(), "$.id")))); + } finally { + EPersonBuilder.deleteEPerson(idRef.get()); + } + } + + + @Test + public void postEPersonFromOrcidValidationRegistrationToken() throws Exception { + + context.turnOffAuthorisationSystem(); + + String registrationEmail = "vincenzo.mecca@4science.com"; + RegistrationData orcidRegistration = + createRegistrationData(RegistrationTypeEnum.VALIDATION_ORCID, registrationEmail); + + context.restoreAuthSystemState(); + + ObjectMapper mapper = new ObjectMapper(); + EPersonRest ePersonRest = createEPersonRest(registrationEmail, orcidRegistration.getNetId()); + + AtomicReference idRef = new AtomicReference<>(); + + try { + getClient().perform(post("/api/eperson/epersons") + .param("token", orcidRegistration.getToken()) + .content(mapper.writeValueAsBytes(ePersonRest)) + .contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.uuid", not(empty())), + // is it what you expect? EPerson.getName() returns the email... 
+ //hasJsonPath("$.name", is("Doe John")), + hasJsonPath("$.email", is(registrationEmail)), + hasJsonPath("$.type", is("eperson")), + hasJsonPath("$.netid", is("0000-0000-0000-0000")), + hasJsonPath("$._links.self.href", not(empty())), + hasJsonPath("$.metadata", Matchers.allOf( + matchMetadata("eperson.firstname", "Vincenzo"), + matchMetadata("eperson.lastname", "Mecca"), + matchMetadata("eperson.orcid", "0000-0000-0000-0000") + ))))) + .andDo(result -> idRef + .set(UUID.fromString(read(result.getResponse().getContentAsString(), "$.id")))); + } finally { + EPersonBuilder.deleteEPerson(idRef.get()); + } + } + + @Test + public void postEpersonNetIdWithoutPasswordNotExternalRegistrationToken() throws Exception { + + ObjectMapper mapper = new ObjectMapper(); + + String newRegisterEmail = "new-register@fake-email.com"; + RegistrationRest registrationRest = new RegistrationRest(); + registrationRest.setEmail(newRegisterEmail); + registrationRest.setNetId("0000-0000-0000-0000"); + getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) + .contentType(MediaType.APPLICATION_JSON) + .content(mapper.writeValueAsBytes(registrationRest))) + .andExpect(status().isCreated()); + + RegistrationData byEmail = registrationDataService.findByEmail(context, newRegisterEmail); + + String newRegisterToken = byEmail.getToken(); + + EPersonRest ePersonRest = new EPersonRest(); + MetadataRest metadataRest = new MetadataRest(); + ePersonRest.setEmail(newRegisterEmail); + ePersonRest.setCanLogIn(true); + ePersonRest.setNetid("0000-0000-0000-0000"); + MetadataValueRest surname = new MetadataValueRest(); + surname.setValue("Doe"); + metadataRest.put("eperson.lastname", surname); + MetadataValueRest firstname = new MetadataValueRest(); + firstname.setValue("John"); + metadataRest.put("eperson.firstname", firstname); + ePersonRest.setMetadata(metadataRest); + + String token = getAuthToken(admin.getEmail(), password); + + try { + 
getClient().perform(post("/api/eperson/epersons") + .param("token", newRegisterToken) + .content(mapper.writeValueAsBytes(ePersonRest)) + .contentType(MediaType.APPLICATION_JSON)) + .andExpect(status().isBadRequest()); + } finally { + context.turnOffAuthorisationSystem(); + registrationDataService.delete(context, byEmail); + context.restoreAuthSystemState(); + } + } + + @Test public void findByMetadataByCommAdminAndByColAdminTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -3534,6 +3911,7 @@ public void patchChangePasswordWithNoCurrentPassword() throws Exception { .andExpect(status().isForbidden()); } + private String buildPasswordAddOperationPatchBody(String password, String currentPassword) { Map value = new HashMap<>(); @@ -3548,4 +3926,51 @@ private String buildPasswordAddOperationPatchBody(String password, String curren } + private static EPersonRest createEPersonRest(String registrationEmail, String netId) { + EPersonRest ePersonRest = new EPersonRest(); + MetadataRest metadataRest = new MetadataRest(); + ePersonRest.setEmail(registrationEmail); + ePersonRest.setCanLogIn(true); + ePersonRest.setNetid(netId); + MetadataValueRest surname = new MetadataValueRest(); + surname.setValue("Mecca"); + metadataRest.put("eperson.lastname", surname); + MetadataValueRest firstname = new MetadataValueRest(); + firstname.setValue("Vincenzo"); + metadataRest.put("eperson.firstname", firstname); + MetadataValueRest orcid = new MetadataValueRest(); + orcid.setValue("0000-0000-0000-0000"); + metadataRest.put("eperson.orcid", orcid); + ePersonRest.setMetadata(metadataRest); + return ePersonRest; + } + + private RegistrationData createRegistrationData(RegistrationTypeEnum validationOrcid, String registrationEmail) + throws SQLException, AuthorizeException { + RegistrationData orcidRegistration = + registrationDataService.create(context, "0000-0000-0000-0000", validationOrcid); + orcidRegistration.setEmail(registrationEmail); + + MetadataField orcidMf = + 
metadataFieldService.findByElement(context, "eperson", "orcid", null); + MetadataField firstNameMf = + metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameMf = + metadataFieldService.findByElement(context, "eperson", "lastname", null); + + registrationDataService.addMetadata( + context, orcidRegistration, orcidMf, "0000-0000-0000-0000" + ); + registrationDataService.addMetadata( + context, orcidRegistration, firstNameMf, "Vincenzo" + ); + registrationDataService.addMetadata( + context, orcidRegistration, lastNameMf, "Mecca" + ); + + registrationDataService.update(context, orcidRegistration); + return orcidRegistration; + } + + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EpoImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EpoImportMetadataSourceServiceIT.java index d86dd1875d8d..f6b944d34264 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EpoImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EpoImportMetadataSourceServiceIT.java @@ -148,28 +148,35 @@ private ArrayList getRecords() { List metadatums = new ArrayList(); MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "epodoc:ES2902749T"); MetadatumDTO patentno = createMetadatumDTO("dc", "identifier", "patentno", "ES2902749T"); + MetadatumDTO kind = createMetadatumDTO("crispatent", "kind", null, "T3"); MetadatumDTO identifier = createMetadatumDTO("dc", "identifier", "applicationnumber", "18705153"); MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2022-03-29"); MetadatumDTO dateSubmitted = createMetadatumDTO("dcterms", "dateSubmitted", null, "2018-02-19"); - MetadatumDTO applicant = createMetadatumDTO("dc", "contributor", null, "PANKA BLOOD TEST GMBH"); - MetadatumDTO applicant2 = createMetadatumDTO("dc", "contributor", null, "Panka Blood Test GmbH"); + MetadatumDTO applicant = 
createMetadatumDTO("dc", "contributor", null, "Panka Blood Test GmbH"); MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "PANTEL, Klaus, "); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "BARTKOWIAK, Kai"); MetadatumDTO title = createMetadatumDTO("dc", "title", null, "Método para el diagnóstico del cáncer de mama"); MetadatumDTO subject = createMetadatumDTO("dc", "subject", null, "G01N 33/ 574 A I "); + MetadatumDTO kindCodeInline = createMetadatumDTO("crispatent", "document", "kind", "T3"); + MetadatumDTO issueDateInline = createMetadatumDTO("crispatent", "document", "issueDate", "2022-03-29"); + MetadatumDTO titleInline = createMetadatumDTO("crispatent", "document", "title", + "Método para el diagnóstico del cáncer de mama"); metadatums.add(identifierOther); metadatums.add(patentno); + metadatums.add(kind); metadatums.add(identifier); metadatums.add(date); metadatums.add(dateSubmitted); metadatums.add(applicant); - metadatums.add(applicant2); metadatums.add(author); metadatums.add(author2); metadatums.add(title); metadatums.add(subject); + metadatums.add(kindCodeInline); + metadatums.add(issueDateInline); + metadatums.add(titleInline); ImportRecord firstrRecord = new ImportRecord(metadatums); @@ -177,11 +184,11 @@ private ArrayList getRecords() { List metadatums2 = new ArrayList(); MetadatumDTO identifierOther2 = createMetadatumDTO("dc", "identifier", "other", "epodoc:TW202202864"); MetadatumDTO patentno2 = createMetadatumDTO("dc", "identifier", "patentno", "TW202202864"); + MetadatumDTO kind2 = createMetadatumDTO("crispatent", "kind", null, "A"); MetadatumDTO identifier2 = createMetadatumDTO("dc", "identifier", "applicationnumber", "109122801"); MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2022-01-16"); MetadatumDTO dateSubmitted2 = createMetadatumDTO("dcterms", "dateSubmitted", null, "2020-07-06"); - MetadatumDTO applicant3 = createMetadatumDTO("dc", "contributor", null, "ADVANTEST CORP 
[JP]"); - MetadatumDTO applicant4 = createMetadatumDTO("dc", "contributor", null, "ADVANTEST CORPORATION"); + MetadatumDTO applicant2 = createMetadatumDTO("dc", "contributor", null, "ADVANTEST CORPORATION"); MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "POEPPE, OLAF, "); MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "HILLIGES, KLAUS-DIETER, "); MetadatumDTO author7 = createMetadatumDTO("dc", "contributor", "author", "KRECH, ALAN"); @@ -192,19 +199,29 @@ private ArrayList getRecords() { "G01R 31/ 319 A I "); MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "G01R 31/ 3193 A I "); + MetadatumDTO kindCodeInline2 = createMetadatumDTO("crispatent", "document", "kind", "A"); + MetadatumDTO issueDateInline2 = createMetadatumDTO("crispatent", "document", "issueDate", "2022-01-16"); + MetadatumDTO titleInline2 = createMetadatumDTO("crispatent", "document", "title", + "Automated test equipment for testing one or more devices under test," + + " method for automated testing of one or more devices under test," + + " and computer program using a buffer memory"); + metadatums2.add(identifierOther2); metadatums2.add(patentno2); + metadatums2.add(kind2); metadatums2.add(identifier2); metadatums2.add(date2); metadatums2.add(dateSubmitted2); - metadatums2.add(applicant3); - metadatums2.add(applicant4); + metadatums2.add(applicant2); metadatums2.add(author5); metadatums2.add(author6); metadatums2.add(author7); metadatums2.add(title2); metadatums2.add(subject2); metadatums2.add(subject3); + metadatums2.add(kindCodeInline2); + metadatums2.add(issueDateInline2); + metadatums2.add(titleInline2); ImportRecord secondRecord = new ImportRecord(metadatums2); records.add(firstrRecord); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java index 565a4d003f78..7c396e803537 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java @@ -19,15 +19,18 @@ import org.dspace.app.rest.matcher.EntityTypeMatcher; import org.dspace.app.rest.matcher.ExternalSourceEntryMatcher; import org.dspace.app.rest.matcher.ExternalSourceMatcher; +import org.dspace.app.rest.matcher.ItemMatcher; import org.dspace.app.rest.matcher.PageMatcher; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; import org.dspace.builder.WorkflowItemBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.EntityType; +import org.dspace.content.Item; import org.dspace.core.CrisConstants; import org.dspace.external.provider.AbstractExternalDataProvider; import org.dspace.external.provider.ExternalDataProvider; @@ -485,4 +488,60 @@ public void findSupportedEntityTypesOfAnExternalDataProviderPaginationTest() thr } } + @Test + public void findOneExternalSourceEntriesDuplicationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + // create item withDoiIdentifier equals 10.1016/j.procs.2017.03.031 + Item itemOne = ItemBuilder.createItem(context, col1) + .withFullName("Public item one") + .withIssueDate("2023-10-17") + .withDoiIdentifier("10.1016/j.procs.2017.03.031") + .withEntityType("Publication") + .build(); + + // create another item withDoiIdentifier equals 
10.1016/j.procs.2017.03.031 + Item itemTwo = ItemBuilder.createItem(context, col1) + .withFullName("Public item two") + .withIssueDate("2023-10-17") + .withDoiIdentifier("10.1016/j.procs.2017.03.031") + .withEntityType("Publication") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/integration/externalsources/mock/entries") + .param("query", "one").param("size", "1")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.externalSourceEntries", Matchers.hasItem( + ExternalSourceEntryMatcher.matchExternalSourceEntry("onetwo", "onetwo", "onetwo", "mock") + ))) + .andExpect(jsonPath("$._embedded.externalSourceEntries[0].matchObjects", containsInAnyOrder( + ItemMatcher.matchItemProperties(itemOne), + ItemMatcher.matchItemProperties(itemTwo) + ))) + .andExpect(jsonPath("$.page", PageMatcher.pageEntryWithTotalPagesAndElements(0, 1, 2, 2))); + + getClient().perform(get("/api/integration/externalsources/mock/entries") + .param("query", "one").param("size", "1").param("page", "1")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.externalSourceEntries", Matchers.hasItem( + ExternalSourceEntryMatcher.matchExternalSourceEntry("one", "one", "one", "mock") + ))) + .andExpect(jsonPath("$._embedded.externalSourceEntries[0].matchObjects", containsInAnyOrder( + ItemMatcher.matchItemProperties(itemOne), + ItemMatcher.matchItemProperties(itemTwo) + ))) + .andExpect(jsonPath("$.page", PageMatcher.pageEntryWithTotalPagesAndElements(1, 1, 2, 2))); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java index 241930dbec37..473bdad73bfa 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java @@ -3090,6 +3090,343 @@ public void findByMetadataPaginationTest() throws Exception 
{ } + // Test of /groups/[uuid]/epersons pagination + @Test + public void epersonMemberPaginationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + EPerson eperson1 = EPersonBuilder.createEPerson(context) + .withEmail("test1@example.com") + .withNameInMetadata("Test1", "User") + .build(); + EPerson eperson2 = EPersonBuilder.createEPerson(context) + .withEmail("test2@example.com") + .withNameInMetadata("Test2", "User") + .build(); + EPerson eperson3 = EPersonBuilder.createEPerson(context) + .withEmail("test3@example.com") + .withNameInMetadata("Test3", "User") + .build(); + EPerson eperson4 = EPersonBuilder.createEPerson(context) + .withEmail("test4@example.com") + .withNameInMetadata("Test4", "User") + .build(); + EPerson eperson5 = EPersonBuilder.createEPerson(context) + .withEmail("test5@example.com") + .withNameInMetadata("Test5", "User") + .build(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .addMember(eperson1) + .addMember(eperson2) + .addMember(eperson3) + .addMember(eperson4) + .addMember(eperson5) + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/epersons") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/epersons") + .param("page", "1") + .param("size", "2")) + 
.andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/epersons") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + + // Test of /groups/[uuid]/subgroups pagination + @Test + public void subgroupPaginationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .build(); + + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 1") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 2") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 3") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 4") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 5") + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + 
"/subgroups") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.subgroups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.subgroups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/subgroups") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.subgroups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.subgroups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/subgroups") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.subgroups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.subgroups").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + + // Test of /groups/search/isNotMemberOf pagination + // NOTE: Additional tests of 'isNotMemberOf' search functionality can be found in GroupTest in 'dspace-api' + @Test + public void searchIsNotMemberOfPaginationTest() throws Exception { + 
context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test Parent group") + .build(); + // Create two subgroups of main group. These SHOULD NOT be included in pagination + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test group 1") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test group 2") + .build(); + + // Create five non-member groups. These SHOULD be included in pagination + GroupBuilder.createGroup(context) + .withName("Test group 3") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 4") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 5") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 6") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 7") + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "test group") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.groups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "test group") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.everyItem( + 
hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.groups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "test group") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.groups").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + + @Test + public void searchIsNotMemberOfByUUID() throws Exception { + context.turnOffAuthorisationSystem(); + // Create two groups which have no parent group + Group group1 = GroupBuilder.createGroup(context) + .withName("Test Parent group 1") + .build(); + + Group group2 = GroupBuilder.createGroup(context) + .withName("Test Parent group 2") + .build(); + + // Create a subgroup of parent group 1 + Group group3 = GroupBuilder.createGroup(context) + .withParent(group1) + .withName("Test subgroup") + .build(); + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + // Search for UUID in a group that the subgroup already belongs to. Should return ZERO results. 
+ getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group1.getID().toString()) + .param("query", group3.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + + // Search for UUID in a group that the subgroup does NOT belong to. Should return group via exact match + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group2.getID().toString()) + .param("query", group3.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.contains( + GroupMatcher.matchGroupEntry(group3.getID(), group3.getName()) + ))) + .andExpect(jsonPath("$.page.totalElements", is(1))); + + // Search for UUID of the group in the "group" param. Should return ZERO results, as "group" param is excluded + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group1.getID().toString()) + .param("query", group1.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + + @Test + public void searchIsNotMemberOfUnauthorized() throws Exception { + // To avoid creating data, just use the Admin & Anon groups for this test + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + + getClient().perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void searchIsNotMemberOfForbidden() throws Exception { + // To avoid creating data, just use the 
Admin & Anon groups for this test + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isForbidden()); + } + + @Test + public void searchIsNotMemberOfMissingOrInvalidParameter() throws Exception { + // To avoid creating data, just use the Admin & Anon groups for this test + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf")) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString())) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", adminGroup.getID().toString())) + .andExpect(status().isBadRequest()); + + // Test invalid group UUID + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString()) + .param("group", "not-a-uuid")) + .andExpect(status().isBadRequest()); + } + @Test public void commAdminAndColAdminCannotExploitItemReadGroupTest() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java index 8c1c534de14c..0bb679339877 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java @@ -67,7 +67,7 @@ public void testWithAdminUser() throws Exception { match("solrSearchCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), match("solrStatisticsCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), match("geoIp", UP_WITH_ISSUES_STATUS, - Map.of("reason", "The required 'dbfile' configuration is missing in solr-statistics.cfg!")) + Map.of("reason", "The required 'dbfile' configuration is missing in usage-statistics.cfg!")) ))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java index 9e0ea90afc69..cdfbf2f29b13 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java @@ -127,19 +127,25 @@ public void singleItemAuthorityTest() throws Exception { Map.of("data-oairecerif_author_affiliation", "OrgUnit_1::" + orgUnit_1.getID(), "oairecerif_author_affiliation", "OrgUnit_1::" - + orgUnit_1.getID())), + + orgUnit_1.getID(), + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")), ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_2.getID().toString(), "Author 2", "Author 2", "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", "OrgUnit_1::" + orgUnit_1.getID(), "oairecerif_author_affiliation", "OrgUnit_1::" - + orgUnit_1.getID())), + + orgUnit_1.getID(), + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")), ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_3.getID().toString(), "Author 3", "Author 3", "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", "OrgUnit_2::" + orgUnit_2.getID(), "oairecerif_author_affiliation", "OrgUnit_2::" - + 
orgUnit_2.getID())) + + orgUnit_2.getID(), + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")) ))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); } @@ -216,13 +222,19 @@ public void multiItemAuthorityTest() throws Exception { "Author 2(OrgUnit_2)", "Author 2", "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", "OrgUnit_2::" + orgUnit_2.getID(), "oairecerif_author_affiliation", "OrgUnit_2::" + orgUnit_2.getID())), + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_1.getID().toString(), + "Author 1", "Author 1", "vocabularyEntry", + Map.of("data-person_identifier_orcid", "", "person_identifier_orcid", "")), + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_2.getID().toString(), + "Author 2", "Author 2", "vocabularyEntry", + Map.of("data-person_identifier_orcid", "", "person_identifier_orcid", "")), // filled with EditorAuthority extra metadata generator - ItemAuthorityMatcher.matchItemAuthorityProperties(author_1.getID().toString(), - "Author 1", "Author 1", "vocabularyEntry"), - ItemAuthorityMatcher.matchItemAuthorityProperties(author_2.getID().toString(), - "Author 2", "Author 2", "vocabularyEntry") + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_1.getID().toString(), + "Author 1", "Author 1", "vocabularyEntry", Map.of()), + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_2.getID().toString(), + "Author 2", "Author 2", "vocabularyEntry", Map.of()) ))) - .andExpect(jsonPath("$.page.totalElements", Matchers.is(5))); + .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); } @Test @@ -250,7 +262,9 @@ public void singleItemAuthorityWithoutOrgUnitTest() throws Exception { .andExpect(jsonPath("$._embedded.entries", Matchers.contains( ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_1.getID().toString(), "Author 1", "Author 1", "vocabularyEntry", - Map.of("data-oairecerif_author_affiliation", "", 
"oairecerif_author_affiliation", "")) + Map.of("data-oairecerif_author_affiliation", "", "oairecerif_author_affiliation", "", + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")) ))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index b59eb9fd8a23..f3877445895a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -12,8 +12,10 @@ import static org.dspace.app.matcher.OrcidQueueMatcher.matches; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataNotEmpty; import static org.dspace.builder.OrcidHistoryBuilder.createOrcidHistory; import static org.dspace.builder.OrcidQueueBuilder.createOrcidQueue; +import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.WRITE; import static org.dspace.orcid.OrcidOperation.DELETE; import static org.dspace.profile.OrcidEntitySyncPreference.ALL; @@ -409,10 +411,13 @@ public void findAllByIdTest() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); - // We want to test that only and exclusively existing items are returned. 
+ // We want to test that only and exclusively existing items are returned + // and each item is returned just one time getClient(token).perform(get("/api/core/items/search/findAllById") .param("id", publicItem1.getID().toString(), + publicItem1.getID().toString(), + UUID.randomUUID().toString(), publicItem2.getID().toString(), UUID.randomUUID().toString() )) @@ -3141,10 +3146,43 @@ public void testHiddenMetadataForUserWithWriteRights() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/core/items/" + item.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", ItemMatcher.matchItemProperties(item))) - .andExpect(jsonPath("$.metadata", matchMetadata("dc.title", "Public item 1"))) - .andExpect(jsonPath("$.metadata", matchMetadataDoesNotExist("dc.description.provenance"))); + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ItemMatcher.matchItemProperties(item))) + .andExpect(jsonPath("$.metadata", matchMetadata("dc.title", "Public item 1"))) + .andExpect(jsonPath("$.metadata", matchMetadataNotEmpty("dc.description.provenance"))); + + } + + @Test + public void testHiddenMetadataForUserWithReadRights() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + + Item item = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withProvenanceData("Provenance data") + .build(); + + context.restoreAuthSystemState(); + + + ResourcePolicyBuilder.createResourcePolicy(context) + .withUser(eperson) + .withAction(READ) + .withDspaceObject(item) + .build(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get("/api/core/items/" + item.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", 
ItemMatcher.matchItemProperties(item))) + .andExpect(jsonPath("$.metadata", matchMetadata("dc.title", "Public item 1"))) + .andExpect(jsonPath("$.metadata", matchMetadataDoesNotExist("dc.description.provenance"))); } @@ -4517,8 +4555,8 @@ public void putItemMetadataWithUserNotPartOfGroupConfigured() throws Exception { itemRest.setInArchive(true); itemRest.setDiscoverable(true); itemRest.setWithdrawn(false); - String token = getAuthToken(admin.getEmail(), password); - MvcResult mvcResult = getClient(token).perform(post("/api/core/items?owningCollection=" + + String adminToken = getAuthToken(admin.getEmail(), password); + MvcResult mvcResult = getClient(adminToken).perform(post("/api/core/items?owningCollection=" + col1.getID().toString()) .content(mapper.writeValueAsBytes(itemRest)) .contentType(contentType)) @@ -4534,12 +4572,25 @@ public void putItemMetadataWithUserNotPartOfGroupConfigured() throws Exception { itemRest.setHandle(itemHandleString); Group group = GroupBuilder.createGroup(context).build(); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); + // add write rights to the user + ResourcePolicyBuilder.createResourcePolicy(context) + .withUser(eperson) + .withAction(WRITE) + .withDspaceObject(itemService.find(context, UUID.fromString(itemUuidString))) + .build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); // expect forbidden, the user is not part of the group set in property {{edit.metadata.allowed-group}} getClient(token).perform(put("/api/core/items/" + itemUuidString) .content(mapper.writeValueAsBytes(itemRest)) .contentType(contentType)) .andExpect(status().isForbidden()); + // admins should still be able to use put + getClient(adminToken).perform(put("/api/core/items/" + itemUuidString) + .content(mapper.writeValueAsBytes(itemRest)) + .contentType(contentType)) + .andExpect(status().isOk()); } finally { ItemBuilder.deleteItem(UUID.fromString(itemUuidString)); } @@ 
-4600,7 +4651,7 @@ public void putItemMetadataWithUserPartOfGroupConfigured() throws Exception { context.restoreAuthSystemState(); token = getAuthToken(eperson.getEmail(), password); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); - // expect forbidden, the user is not part of the group set in property {{edit.metadata.allowed-group}} + // expect ok, the user is part of the group set in property {{edit.metadata.allowed-group}} getClient(token).perform(put("/api/core/items/" + itemUuidString) .content(mapper.writeValueAsBytes(itemRest)) .contentType(contentType)) @@ -4872,7 +4923,7 @@ public void findVersionForItemWithoutVersionsWithVersioningDisabledTest() throws public void patchItemMetadataWithUserPartOfGroupConfigured() throws Exception { context.turnOffAuthorisationSystem(); // add admin person as member to the group - Group group = GroupBuilder.createGroup(context).addMember(admin).build(); + Group group = GroupBuilder.createGroup(context).addMember(eperson).build(); groupService.update(context, group); context.commit(); // ** GIVEN ** @@ -4895,15 +4946,19 @@ public void patchItemMetadataWithUserPartOfGroupConfigured() throws Exception { .build(); // add write permission to the user admin ResourcePolicyBuilder.createResourcePolicy(context) - .withUser(admin) + .withUser(eperson) .withAction(WRITE) .withDspaceObject(itemService.find(context, item.getID())) .build(); context.restoreAuthSystemState(); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); - String token = getAuthToken(admin.getEmail(), password); + String token = getAuthToken(eperson.getEmail(), password); List ops = new ArrayList(); - ReplaceOperation replaceOperation = new ReplaceOperation("/withdrawn", true); + List> titleValue = new ArrayList<>(); + Map value = new HashMap(); + value.put("value", "New title"); + titleValue.add(value); + ReplaceOperation replaceOperation = new ReplaceOperation("/metadata/dc.title", titleValue); 
ops.add(replaceOperation); String patchBody = getPatchContent(ops); // withdraw item @@ -4913,8 +4968,7 @@ public void patchItemMetadataWithUserPartOfGroupConfigured() throws Exception { .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) .andExpect(jsonPath("$.uuid", Matchers.is(item.getID().toString()))) - .andExpect(jsonPath("$.withdrawn", Matchers.is(true))) - .andExpect(jsonPath("$.inArchive", Matchers.is(false))); + .andExpect(jsonPath("$.metadata['dc.title'][0].value", Matchers.is("New title"))); } @Test @@ -4939,7 +4993,7 @@ public void patchItemMetadataWithUserNotPartOfGroupConfigured() throws Exception .build(); // add write rights to the user admin ResourcePolicyBuilder.createResourcePolicy(context) - .withUser(admin) + .withUser(eperson) .withAction(WRITE) .withDspaceObject(itemService.find(context, item.getID())) .build(); @@ -4949,9 +5003,13 @@ public void patchItemMetadataWithUserNotPartOfGroupConfigured() throws Exception context.commit(); context.restoreAuthSystemState(); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); - String token = getAuthToken(admin.getEmail(), password); + String token = getAuthToken(eperson.getEmail(), password); List ops = new ArrayList(); - ReplaceOperation replaceOperation = new ReplaceOperation("/withdrawn", true); + List> titleValue = new ArrayList<>(); + Map value = new HashMap(); + value.put("value", "New title"); + titleValue.add(value); + ReplaceOperation replaceOperation = new ReplaceOperation("/metadata/dc.title", titleValue); ops.add(replaceOperation); String patchBody = getPatchContent(ops); // withdraw item @@ -4960,6 +5018,14 @@ public void patchItemMetadataWithUserNotPartOfGroupConfigured() throws Exception .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isForbidden()); + token = getAuthToken(admin.getEmail(), password); + //expect ok as admin + getClient(token).perform(patch("/api/core/items/" + 
item.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.uuid", Matchers.is(item.getID().toString()))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", Matchers.is("New title"))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java index 55e82831f3d1..1fd9e81ca88d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java @@ -33,6 +33,7 @@ import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Collection; import org.dspace.core.Constants; import org.hamcrest.Matchers; @@ -243,6 +244,35 @@ public void patchTemplateItem() throws Exception { ))))); } + /* Similar to patchTemplateItem(), except it is for collection admin, not repository admin + Test case was simplified, since it does not do anything else. 
+ */ + @Test + public void patchTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(patch(getTemplateItemUrlTemplate(itemId)) + .content(patchBody) + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + + getClient(collAdminToken).perform(get(getCollectionTemplateItemUrlTemplate(childCollection.getID().toString()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + } + @Test public void patchIllegalInArchiveTemplateItem() throws Exception { setupTestTemplate(); @@ -337,6 +367,22 @@ public void deleteTemplateItem() throws Exception { .andExpect(status().isNoContent()); } + /*Similar to deleteTemplateItem(), except it is for collection admin, not repository admin + */ + @Test + public void deleteTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(delete(getTemplateItemUrlTemplate(itemId))) + .andExpect(status().isNoContent()); + } + @Test public void deleteTemplateItemNoRights() throws Exception { setupTestTemplate(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java index 5d293ce2fbc3..aa5705beb861 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java @@ -53,6 +53,7 @@ import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.hamcrest.Matchers; @@ -491,14 +492,20 @@ public void customDataTestWithOneGroup() throws Exception { .build(); // Create Group with member userA - Set groups = new HashSet<>(); + Set box2SecurityGroups = new HashSet<>(); Group testGroup = GroupBuilder.createGroup(context) .withName("testGroup") .addMember(userA) .build(); - groups.add(testGroup); - box1.setGroupSecurityFields(groups); + new CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup), + box1, testGroup, null); + + box2SecurityGroups.add(new CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup), + box1, testGroup, null)); + box1.setBox2SecurityGroups(box2SecurityGroups); CrisLayoutFieldBuilder.createMetadataField(context, abs, 0, 0) .withLabel("LABEL ABS") @@ -577,7 +584,7 @@ public void customDataTestWithMultipleGroup() throws Exception { .build(); // Create Group with member userA - Set boxGroups = new HashSet<>(); + Set boxGroups = new HashSet<>(); Group testGroup = GroupBuilder.createGroup(context) .withName("testGroup") @@ -589,9 +596,14 @@ public void customDataTestWithMultipleGroup() throws Exception { .addMember(userB) .build(); - boxGroups.add(testGroup); - boxGroups.add(testGroup1); - box1.setGroupSecurityFields(boxGroups); + boxGroups.add(new CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup), + box1, testGroup, null)); + boxGroups.add(new 
CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup1), + box1, testGroup, null)); + + box1.setBox2SecurityGroups(boxGroups); CrisLayoutFieldBuilder.createMetadataField(context, abs, 0, 0) .withLabel("LABEL ABS") diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java index f1a1a095b16e..72508a0dad58 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java @@ -88,7 +88,7 @@ public void createSuccess() throws Exception { context.turnOffAuthorisationSystem(); MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") - .build(); + .build(); context.restoreAuthSystemState(); MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); @@ -116,6 +116,41 @@ public void createSuccess() throws Exception { } } + @Test + public void createUnprocessableEntity_prefixContainingInvalidCharacters() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") + .build(); + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); + metadataSchemaRest.setPrefix("test.SchemaName"); + metadataSchemaRest.setNamespace(TEST_NAMESPACE); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test,SchemaName"); 
+ getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedTest() throws Exception { @@ -202,7 +237,7 @@ public void update() throws Exception { MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); metadataSchemaRest.setId(metadataSchema.getID()); - metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setPrefix(TEST_NAME); metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) @@ -214,7 +249,33 @@ public void update() throws Exception { getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", MetadataschemaMatcher - .matchEntry(TEST_NAME_UPDATED, TEST_NAMESPACE_UPDATED))); + .matchEntry(TEST_NAME, TEST_NAMESPACE_UPDATED))); + } + + @Test + public void update_schemaNameShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE) + .build(); + + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); + metadataSchemaRest.setId(metadataSchema.getID()); + metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadataschemas/" + metadataSchema.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) 
+ .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataschemaMatcher + .matchEntry(TEST_NAME, TEST_NAMESPACE))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java index 70b76e1afd6d..a615c58c11ae 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java @@ -9,6 +9,7 @@ import static com.jayway.jsonpath.JsonPath.read; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -49,12 +50,12 @@ */ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest { - private static final String ELEMENT = "test element"; - private static final String QUALIFIER = "test qualifier"; + private static final String ELEMENT = "test_element"; + private static final String QUALIFIER = "test_qualifier"; private static final String SCOPE_NOTE = "test scope_note"; - private static final String ELEMENT_UPDATED = "test element updated"; - private static final String QUALIFIER_UPDATED = "test qualifier updated"; + private static final String ELEMENT_UPDATED = "test_element_updated"; + private static final String QUALIFIER_UPDATED = "test_qualifier_updated"; private static final String SCOPE_NOTE_UPDATED = "test scope_note updated"; private MetadataSchema metadataSchema; @@ -564,6 +565,70 @@ public void findByFieldName_exactName_combinedDiscoveryQueryParams_qualifier() t .andExpect(status().isUnprocessableEntity()); } + @Test + public void 
findByFieldName_sortByFieldNameASC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,ASC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField2), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField1) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + + @Test + public void findByFieldName_sortByFieldNameDESC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + 
.param("sort", "fieldName,DESC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField1), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField2) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + @Test public void createSuccess() throws Exception { @@ -575,7 +640,8 @@ public void createSuccess() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -606,7 +672,8 @@ public void createBlankQualifier() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); Integer id = null; try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, null), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + null), nullValue()); id = read( getClient(authToken) @@ -641,7 +708,8 @@ public void create_checkAddedToIndex() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -689,6 +757,94 @@ public 
void createUnauthorized() throws Exception { .andExpect(status().isUnauthorized()); } + @Test + public void createUnprocessableEntity_elementContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement("testElement.ForCreate"); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void 
createUnprocessableEntity_qualifierContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier("testQualifier.ForCreate"); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedEPersonNoAdminRights() throws Exception { @@ -832,31 +988,81 @@ public void 
update() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) - .build(); + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isOk()); + } + + @Test + public void update_elementShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); context.restoreAuthSystemState(); MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); metadataFieldRest.setId(metadataField.getID()); metadataFieldRest.setElement(ELEMENT_UPDATED); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); + } + + @Test + public void update_qualifierShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = 
MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); metadataFieldRest.setQualifier(QUALIFIER_UPDATED); metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) .perform(put("/api/core/metadatafields/" + metadataField.getID()) - .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) - .contentType(contentType)) - .andExpect(status().isOk()); + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( - metadataSchema.getName(), ELEMENT_UPDATED, QUALIFIER_UPDATED) - )); + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); } @Test - public void update_checkUpdatedInIndex() throws Exception { + public void update_checkNotUpdatedInIndex() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) @@ -885,27 +1091,27 @@ public void update_checkUpdatedInIndex() throws Exception { .perform(put("/api/core/metadatafields/" + metadataField.getID()) .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .contentType(contentType)) - .andExpect(status().isOk()); + .andExpect(status().isUnprocessableEntity()); - // new metadata field found in index + // new metadata field not found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", 
ELEMENT_UPDATED) .param("qualifier", QUALIFIER_UPDATED)) .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( - MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), - ELEMENT_UPDATED, QUALIFIER_UPDATED)) - )) - .andExpect(jsonPath("$.page.totalElements", is(1))); + .andExpect(jsonPath("$.page.totalElements", is(0))); - // original metadata field not found in index + // original metadata field found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", metadataField.getElement()) .param("qualifier", metadataField.getQualifier())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))); + .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( + MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), + ELEMENT, QUALIFIER)) + )) + .andExpect(jsonPath("$.page.totalElements", is(1))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java index 2eb9cae64ae9..62cc6b11c32a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java @@ -56,8 +56,8 @@ */ public class OrcidAuthorityIT extends AbstractControllerIntegrationTest { - private static final String ORCID_INFO = OrcidAuthority.ORCID_EXTRA; - private static final String ORCID_INSTITUTION = OrcidAuthority.INSTITUTION_EXTRA; + private static final String ORCID_INFO = OrcidAuthority.DEFAULT_ORCID_KEY; + private static final String ORCID_INSTITUTION = OrcidAuthority.DEFAULT_INSTITUTION_KEY; private static final String READ_PUBLIC_TOKEN = "062d9f30-7e11-47ef-bd95-eaa2f2452565"; @@ -141,9 +141,9 @@ public void testWithWillBeGeneratedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", 
""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", GENERATE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", GENERATE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -188,9 +188,9 @@ public void testWithWillBeReferencedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -237,9 +237,9 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", 
REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(10))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -285,7 +285,7 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333")))) .andExpect(jsonPath("$.page.size", Matchers.is(5))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -298,8 +298,8 @@ public void testWithPagination() throws Exception { .param("size", "5")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(5))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -312,7 +312,7 @@ public void testWithPagination() throws Exception { .param("size", "6")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 3 
Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(6))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -460,8 +460,8 @@ public void testWithoutClientIdConfiguration() throws Exception { .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( affiliationEntry(author_1, "Author 1", ""), affiliationEntry(author_2, "Author 2", ""), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444")))) + orcidEntry("From Orcid 1 Author", GENERATE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", GENERATE, "0000-2222-3333-4444")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(4))); @@ -500,8 +500,8 @@ public void testWithoutClientSecretConfiguration() throws Exception { .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( affiliationEntry(author_1, "Author 1", ""), affiliationEntry(author_2, "Author 2", ""), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444")))) + orcidEntry("From Orcid 1 Author", GENERATE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", GENERATE, "0000-2222-3333-4444")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(4))); @@ -533,9 +533,9 @@ public void testWithComposedName() throws Exception { .param("filter", "John Bruce Wayne")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", 
REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -568,9 +568,9 @@ public void testWithLastNameAndFirstNameSeparatedByComma() throws Exception { .param("filter", "Wayne, Bruce")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -581,6 +581,107 @@ public void testWithLastNameAndFirstNameSeparatedByComma() throws Exception { } + + @Test + public void testWithLatinValueLastNameAndFirstNameSeparatedByComma() throws Exception { + + String expectedQuery = "(given-names:Wayne+OR+family-name:Wayne+OR+other-names:Wayne)" + + "+AND+(given-names:Bruce+OR+family-name:Bruce+OR+other-names:Bruce)"; + + when(orcidClientMock.expandedSearch(eq(READ_PUBLIC_TOKEN), eq(expectedQuery), anyInt(), anyInt())) + .thenReturn(expandedSearch(0l, List.of())); + + List orcidSearchResults = List.of(expandedResult("Vincenzo", "Mecca", "0000-1111-2222-3333")); + + when(orcidClientMock.expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20)) + .thenReturn(expandedSearch(1, 
orcidSearchResults)); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/AuthorAuthority/entries") + .param("filter", "Wayne, Bruce")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + orcidEntry("Mecca, Vincenzo", REFERENCE, "0000-1111-2222-3333")))) + .andExpect(jsonPath("$.page.size", Matchers.is(20))) + .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20); + verifyNoMoreInteractions(orcidClientMock); + + } + + @Test + public void testWithNonLatinValueLastNameAndFirstNameSeparatedByComma() throws Exception { + + String expectedQuery = "(given-names:Wayne+OR+family-name:Wayne+OR+other-names:Wayne)" + + "+AND+(given-names:Bruce+OR+family-name:Bruce+OR+other-names:Bruce)"; + + when(orcidClientMock.expandedSearch(eq(READ_PUBLIC_TOKEN), eq(expectedQuery), anyInt(), anyInt())) + .thenReturn(expandedSearch(0l, List.of())); + + List orcidSearchResults = List.of( + expandedResult("Vins", "@4Science", "0000-1111-2222-3333"), + expandedResult("V1n5", "M3cc4", "0000-4444-5555-6666") + ); + + when(orcidClientMock.expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20)) + .thenReturn(expandedSearch(2, orcidSearchResults)); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/AuthorAuthority/entries") + .param("filter", "Wayne, Bruce")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + orcidEntry("@4science Vins", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("M3cc4 V1n5", REFERENCE, "0000-4444-5555-6666") + ))) + .andExpect(jsonPath("$.page.size", Matchers.is(20))) + .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) + 
.andExpect(jsonPath("$.page.totalElements", Matchers.is(2))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testWithMultipleValueLastNameAndFirstNameSeparatedByComma() throws Exception { + + String expectedQuery = "(given-names:Wayne+OR+family-name:Wayne+OR+other-names:Wayne)" + + "+AND+(given-names:Bruce+OR+family-name:Bruce+OR+other-names:Bruce)"; + + when(orcidClientMock.expandedSearch(eq(READ_PUBLIC_TOKEN), eq(expectedQuery), anyInt(), anyInt())) + .thenReturn(expandedSearch(0l, List.of())); + + List orcidSearchResults = List.of( + expandedResult("Vincenzo", "Mecca", "0000-1111-2222-3333"), + expandedResult("Vins", "@4Science", "0000-4444-5555-6666"), + expandedResult("V1n5", "M3cc4", "0000-7777-8888-9999") + ); + + when(orcidClientMock.expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20)) + .thenReturn(expandedSearch(3, orcidSearchResults)); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/AuthorAuthority/entries") + .param("filter", "Wayne, Bruce")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + orcidEntry("Mecca, Vincenzo", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("@4science Vins", REFERENCE, "0000-4444-5555-6666"), + orcidEntry("M3cc4 V1n5", REFERENCE, "0000-7777-8888-9999") + ))) + .andExpect(jsonPath("$.page.size", Matchers.is(20))) + .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20); + verifyNoMoreInteractions(orcidClientMock); + } + @Test public void testWithAffiliationExtra() throws Exception { @@ -599,9 +700,9 @@ public void 
testWithAffiliationExtra() throws Exception { .param("filter", "author")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntryWithInstitution("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444", "Org1, Org2"), - orcidEntryWithInstitution("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777", "Organization")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntryWithAffiliation("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444", "Org1, Org2"), + orcidEntryWithAffiliation("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777", "Organization")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -669,8 +770,11 @@ private Item buildPerson(String title, Item affiliation) { private Matcher affiliationEntry(Item item, String title, String otherInfoValue) { return ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(id(item), title, - title, "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", otherInfoValue, - "oairecerif_author_affiliation", otherInfoValue)); + title, "vocabularyEntry", Map.of( + "data-oairecerif_author_affiliation", otherInfoValue, + "oairecerif_author_affiliation", otherInfoValue, + "data-" + ORCID_INFO, "", + ORCID_INFO, "")); } private Matcher orcidEntry(String title, String authorityPrefix, String orcid) { @@ -679,11 +783,18 @@ private Matcher orcidEntry(String title, String authorityPrefix, title, "vocabularyEntry", ORCID_INFO, orcid); } - private Matcher orcidEntryWithInstitution(String title, String authorityPrefix, - String orcid, String institutions) { + private Matcher orcidEntryWithAffiliation(String title, String authorityPrefix, + String orcid, String affiliation) { String authority = authorityPrefix + "ORCID::" + orcid; - return 
ItemAuthorityMatcher.matchItemAuthorityWithTwoMetadataInOtherInformations(authority, title, - title, "vocabularyEntry", ORCID_INFO, orcid, ORCID_INSTITUTION, institutions); + return ItemAuthorityMatcher.matchItemAuthorityWithTwoMetadataInOtherInformations( + authority, title, title, "vocabularyEntry", + Map.of( + "data-" + ORCID_INFO, orcid, + ORCID_INFO, orcid, + "data-oairecerif_author_affiliation", affiliation, + "oairecerif_author_affiliation", affiliation + ) + ); } private String id(Item item) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java index 04592c17da2d..2e797f9ce4ab 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java @@ -150,10 +150,10 @@ public void findOneExternalSourcesMockitoTest() throws Exception { OrcidRestConnector orcidConnector = Mockito.mock(OrcidRestConnector.class); OrcidRestConnector realConnector = orcidV3AuthorDataProvider.getOrcidRestConnector(); orcidV3AuthorDataProvider.setOrcidRestConnector(orcidConnector); - when(orcidConnector.get(ArgumentMatchers.endsWith("/person"), ArgumentMatchers.any())) + when(orcidConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) .thenAnswer(new Answer() { public InputStream answer(InvocationOnMock invocation) { - return getClass().getResourceAsStream("orcid-person-record.xml"); + return getClass().getResourceAsStream("orcid-record.xml"); } }); @@ -193,10 +193,10 @@ public InputStream answer(InvocationOnMock invocation) { return getClass().getResourceAsStream("orcid-search.xml"); } }); - when(orcidConnector.get(ArgumentMatchers.endsWith("/person"), ArgumentMatchers.any())) + when(orcidConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) .thenAnswer(new 
Answer() { public InputStream answer(InvocationOnMock invocation) { - return getClass().getResourceAsStream("orcid-person-record.xml"); + return getClass().getResourceAsStream("orcid-record.xml"); } }); String q = "orcid:0000-0002-9029-1854"; @@ -246,10 +246,10 @@ public InputStream answer(InvocationOnMock invocation) { return getClass().getResourceAsStream("orcid-search.xml"); } }); - when(orcidConnector.get(ArgumentMatchers.endsWith("/person"), ArgumentMatchers.any())) + when(orcidConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) .thenAnswer(new Answer() { public InputStream answer(InvocationOnMock invocation) { - return getClass().getResourceAsStream("orcid-person-record.xml"); + return getClass().getResourceAsStream("orcid-record.xml"); } }); String q = "family-name:bollini AND given-names:andrea"; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidLoginFilterIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidLoginFilterIT.java index 4b441b1bc8fc..5b167050780f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidLoginFilterIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidLoginFilterIT.java @@ -10,9 +10,11 @@ import static java.util.Arrays.asList; import static org.dspace.app.matcher.MetadataValueMatcher.with; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -21,6 +23,7 @@ import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.cookie; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; @@ -29,11 +32,14 @@ import java.sql.SQLException; import java.text.ParseException; import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import javax.servlet.http.Cookie; import com.jayway.jsonpath.JsonPath; import com.nimbusds.jose.JOSEException; import com.nimbusds.jwt.SignedJWT; +import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.model.AuthnRest; import org.dspace.app.rest.security.OrcidLoginFilter; import org.dspace.app.rest.security.jwt.EPersonClaimProvider; @@ -46,14 +52,16 @@ import org.dspace.content.Item; import org.dspace.content.service.ItemService; import org.dspace.eperson.EPerson; +import org.dspace.eperson.RegistrationTypeEnum; import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.RegistrationDataService; import org.dspace.orcid.OrcidToken; import org.dspace.orcid.client.OrcidClient; import org.dspace.orcid.exception.OrcidClientException; import org.dspace.orcid.model.OrcidTokenResponseDTO; import org.dspace.orcid.service.OrcidTokenService; import org.dspace.services.ConfigurationService; -import org.dspace.util.UUIDUtils; +import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -104,6 +112,9 @@ public class OrcidLoginFilterIT extends AbstractControllerIntegrationTest { @Autowired private OrcidTokenService orcidTokenService; + @Autowired + private RegistrationDataService registrationDataService; + @Before public void setup() { originalOrcidClient = 
orcidAuthentication.getOrcidClient(); @@ -137,45 +148,76 @@ public void testNoRedirectIfOrcidDisabled() throws Exception { @Test public void testEPersonCreationViaOrcidLogin() throws Exception { - when(orcidClientMock.getAccessToken(CODE)).thenReturn(buildOrcidTokenResponse(ORCID, ACCESS_TOKEN)); - when(orcidClientMock.getPerson(ACCESS_TOKEN, ORCID)).thenReturn(buildPerson("Test", "User", "test@email.it")); - - MvcResult mvcResult = getClient().perform(get("/api/" + AuthnRest.CATEGORY + "/orcid") - .param("code", CODE)) - .andExpect(status().is3xxRedirection()) - .andExpect(redirectedUrl(configurationService.getProperty("dspace.ui.url"))) - .andExpect(cookie().exists("Authorization-cookie")) - .andReturn(); - - verify(orcidClientMock).getAccessToken(CODE); - verify(orcidClientMock).getPerson(ACCESS_TOKEN, ORCID); - verifyNoMoreInteractions(orcidClientMock); - - String ePersonId = getEPersonIdFromAuthorizationCookie(mvcResult); - - createdEperson = ePersonService.find(context, UUIDUtils.fromString(ePersonId)); - assertThat(createdEperson, notNullValue()); - assertThat(createdEperson.getEmail(), equalTo("test@email.it")); - assertThat(createdEperson.getFullName(), equalTo("Test User")); - assertThat(createdEperson.getNetid(), equalTo(ORCID)); - assertThat(createdEperson.canLogIn(), equalTo(true)); - assertThat(createdEperson.getMetadata(), hasItem(with("eperson.orcid", ORCID))); - assertThat(createdEperson.getMetadata(), hasItem(with("eperson.orcid.scope", ORCID_SCOPES[0], 0))); - assertThat(createdEperson.getMetadata(), hasItem(with("eperson.orcid.scope", ORCID_SCOPES[1], 1))); - - assertThat(getOrcidAccessToken(createdEperson), is(ACCESS_TOKEN)); + String defaultProp = configurationService.getProperty("orcid.registration-data.url"); + configurationService.setProperty("orcid.registration-data.url", "/test-redirect?random-token={0}"); + try { + when(orcidClientMock.getAccessToken(CODE)).thenReturn(buildOrcidTokenResponse(ORCID, ACCESS_TOKEN)); + 
when(orcidClientMock.getPerson(ACCESS_TOKEN, ORCID)).thenReturn( + buildPerson("Test", "User", "test@email.it")); + + MvcResult mvcResult = + getClient().perform(get("/api/" + AuthnRest.CATEGORY + "/orcid").param("code", CODE)) + .andExpect(status().is3xxRedirection()) + .andReturn(); + + String redirectedUrl = mvcResult.getResponse().getRedirectedUrl(); + assertThat(redirectedUrl, not(emptyString())); + + verify(orcidClientMock).getAccessToken(CODE); + verify(orcidClientMock).getPerson(ACCESS_TOKEN, ORCID); + verifyNoMoreInteractions(orcidClientMock); + + final Pattern pattern = Pattern.compile("test-redirect\\?random-token=([a-zA-Z0-9]+)"); + final Matcher matcher = pattern.matcher(redirectedUrl); + matcher.find(); + + assertThat(matcher.groupCount(), is(1)); + assertThat(matcher.group(1), not(emptyString())); + + String rdToken = matcher.group(1); + + getClient().perform(get("/api/eperson/registration/search/findByToken") + .param("token", rdToken)) + .andExpect(status().is2xxSuccessful()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.netId", equalTo(ORCID))) + .andExpect(jsonPath("$.registrationType", equalTo(RegistrationTypeEnum.ORCID.toString()))) + .andExpect(jsonPath("$.email", equalTo("test@email.it"))) + .andExpect( + jsonPath("$.registrationMetadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("eperson.orcid", ORCID), + MetadataMatcher.matchMetadata("eperson.firstname", "Test"), + MetadataMatcher.matchMetadata("eperson.lastname", "User") + ) + ) + ); + } finally { + configurationService.setProperty("orcid.registration-data.url", defaultProp); + } } @Test - public void testEPersonCreationViaOrcidLoginWithoutEmail() throws Exception { + public void testRedirectiViaOrcidLoginWithoutEmail() throws Exception { when(orcidClientMock.getAccessToken(CODE)).thenReturn(buildOrcidTokenResponse(ORCID, ACCESS_TOKEN)); when(orcidClientMock.getPerson(ACCESS_TOKEN, ORCID)).thenReturn(buildPerson("Test", "User")); - 
getClient().perform(get("/api/" + AuthnRest.CATEGORY + "/orcid") - .param("code", CODE)) - .andExpect(status().is3xxRedirection()) - .andExpect(redirectedUrl("http://localhost:4000/error?status=401&code=orcid.generic-error")); + MvcResult orcidLogin = + getClient().perform(get("/api/" + AuthnRest.CATEGORY + "/orcid").param("code", CODE)) + .andExpect(status().is3xxRedirection()) + .andReturn(); + + String redirectedUrl = orcidLogin.getResponse().getRedirectedUrl(); + + assertThat(redirectedUrl, notNullValue()); + + final Pattern pattern = Pattern.compile("external-login/([a-zA-Z0-9]+)"); + final Matcher matcher = pattern.matcher(redirectedUrl); + matcher.find(); + + assertThat(matcher.groupCount(), is(1)); + assertThat(matcher.group(1), not(emptyString())); verify(orcidClientMock).getAccessToken(CODE); verify(orcidClientMock).getPerson(ACCESS_TOKEN, ORCID); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java index 21d48c1f2309..ddda2c97cc57 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java @@ -28,6 +28,10 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.junit.Test; /** @@ -40,6 +44,12 @@ public class PatchWithAuthorityIT extends AbstractControllerIntegrationTest { private WorkspaceItem workspaceItem; + private final ConfigurationService configurationService = DSpaceServicesFactory + .getInstance().getConfigurationService(); + + private final MetadataAuthorityService 
metadataAuthorityService = ContentAuthorityServiceFactory + .getInstance().getMetadataAuthorityService(); + @Override public void setUp() throws Exception { super.setUp(); @@ -62,22 +72,30 @@ public void setUp() throws Exception { public void addValueFromControlledVocabularyHasAuthorityStored() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); - MetadataValueRest value = new MetadataValueRest("dataset"); - value.setAuthority("c_ddb1"); - value.setConfidence(600); - List operations = - singletonList(new AddOperation("/sections/publication/dc.type", - singletonList(value))); - - getClient(authToken).perform(patch("/api/submission/workspaceitems/" + workspaceItem.getID()) - .contentType(MediaType.APPLICATION_JSON) - .content(getPatchContent(operations))) - .andExpect(status().isOk()); - - Item item = context.reloadEntity(workspaceItem).getItem(); - - assertThat(item.getMetadata(), hasItem(with("dc.type", "dataset", null, - "c_ddb1", 0, 600))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + MetadataValueRest value = new MetadataValueRest("dataset"); + value.setAuthority("c_ddb1"); + value.setConfidence(600); + List operations = + singletonList(new AddOperation("/sections/publication/dc.type", + singletonList(value))); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + workspaceItem.getID()) + .contentType(MediaType.APPLICATION_JSON) + .content(getPatchContent(operations))) + .andExpect(status().isOk()); + + Item item = context.reloadEntity(workspaceItem).getItem(); + + assertThat(item.getMetadata(), hasItem(with("dc.type", "dataset", null, + "c_ddb1", 0, 600))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java new file mode 100644 index 000000000000..b5c67c640fff --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java @@ -0,0 +1,624 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.InputStream; +import java.util.UUID; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.dspace.app.rest.matcher.BitstreamMatcher; +import org.dspace.app.rest.matcher.BundleMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import 
org.springframework.beans.factory.annotation.Autowired; + +/** + * Tests for the PrimaryBitstreamController + */ +public class PrimaryBitstreamControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + BundleService bundleService; + @Autowired + BitstreamService bitstreamService; + + Item item; + Bitstream bitstream; + Bundle bundle; + Community community; + Collection collection; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + community = CommunityBuilder.createCommunity(context).build(); + collection = CollectionBuilder.createCollection(context, community).build(); + item = ItemBuilder.createItem(context, collection).build(); + + // create bitstream in ORIGINAL bundle of item + String bitstreamContent = "TEST CONTENT"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + bundle = item.getBundles("ORIGINAL").get(0); + context.restoreAuthSystemState(); + } + + @Test + public void testGetPrimaryBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BitstreamMatcher.matchProperties(bitstream))); + } + + @Test + public void testGetPrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testGetPrimaryBitstreamNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(bundle.getID()))) + 
.andExpect(status().isNoContent()) + .andExpect(jsonPath("$").doesNotExist()); + } + + @Test + public void testPostPrimaryBitstream() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle.getName(), bundle.getID(), + bundle.getHandle(), bundle.getType()))); + // verify primaryBitstream was actually added + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(UUID.randomUUID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isNotFound()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamInvalidBitstream() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamAlreadyExists() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + 
getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamNotInBundle() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle2 = BundleBuilder.createBundle(context, item).withName("Bundle2").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", 
BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Community Admin can't set a primaryBitstream outside their own Community + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Collection Admin can't set a primaryBitstream outside their own Collection + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void 
testPostPrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Item Admin can't set a primaryBitstream outside their own Item + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamForbidden() throws Exception { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamUnauthenticated() throws Exception { + getClient().perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isUnauthorized()); + } + + @Test + public void testUpdatePrimaryBitstream() throws Exception { + 
context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle.getName(), bundle.getID(), + bundle.getHandle(), bundle.getType()))); + // verify primaryBitstream was actually updated + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream2, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(UUID.randomUUID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testUpdatePrimaryBitstreamInvalidBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is 
still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamNotInBundle() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bundle bundle2 = BundleBuilder.createBundle(context, item).withName("Bundle2").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify 
primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Community Admin can't update a primaryBitstream outside their own Community + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Collection Admin can't update a primaryBitstream outside their own Collection + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + 
.andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Item Admin can't update a primaryBitstream outside their own Item + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamForbidden() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public 
void testUpdatePrimaryBitstreamUnauthenticated() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + getClient().perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnauthorized()); + } + + @Test + public void testDeletePrimaryBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle.getBitstreams().size()); + Assert.assertEquals(bitstream, bundle.getBitstreams().get(0)); + } + + @Test + public void testDeletePrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testDeletePrimaryBitstreamBundleNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testDeletePrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = 
CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Community Admin can't delete a primaryBitstream outside their own Community + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + 
getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Collection Admin can't delete a primaryBitstream outside their own Collection + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Item Admin can't delete a primaryBitstream outside their own Item + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + 
.content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamForbidden() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamUnauthenticated() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + getClient().perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isUnauthorized()); + } + + private String getBundlePrimaryBitstreamUrl(UUID uuid) { + return "/api/core/bundles/" + uuid + "/primaryBitstream"; + } + + private String getBitstreamUrl(UUID uuid) { + return "/api/core/bitstreams/" + uuid; + } + + private Bitstream createBitstream(Bundle bundle) throws Exception { + String bitstreamContent = "Bitstream Content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + return BitstreamBuilder.createBitstream(context, bundle, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index 6cf4d58df799..305f8aac7fe8 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -243,22 +243,35 @@ public void getAllProcessesTestStartingUser() throws Exception { @Test public void getProcessFiles() throws Exception { + context.setCurrentUser(eperson); Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); - try (InputStream is = IOUtils.toInputStream("Test File For Process", 
CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + getClient(epersonToken) + .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); } @Test @@ -266,25 +279,34 @@ public void getProcessFilesByFileType() throws Exception { Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - 
processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files/inputfile")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - } @Test public void getProcessFilesTypes() throws Exception { + Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } List fileTypesToCheck = new LinkedList<>(); @@ -292,12 +314,18 @@ public void getProcessFilesTypes() 
throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/filetypes")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) .andExpect(status().isOk()) .andExpect(jsonPath("$", ProcessFileTypesMatcher - .matchProcessFileTypes("filetypes-" + process.getID(), fileTypesToCheck))); - + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ProcessFileTypesMatcher + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); } @Test @@ -806,30 +834,44 @@ public void searchProcessTestByUserSortedOnNonExistingIsSortedAsDefault() throws .andExpect(status().isBadRequest()); } - /** - * Test get process output by admin created by himself - */ @Test public void getProcessOutput() throws Exception { + context.setCurrentUser(eperson); + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendLog(process.getID(), process.getName(), "testlog", ProcessLogLevel.INFO); + processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO); } - processService.createLogBitstream(context, process); + processService.createLogBitstream(context, process1); List fileTypesToCheck = new LinkedList<>(); fileTypesToCheck.add("inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + 
process.getID() + "/output")) + getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); + String epersonToken = getAuthToken(eperson.getEmail(), password); + + getClient(epersonToken) + .perform(get("/api/system/processes/" + process1.getID() + "/output")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.name", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.type", is("bitstream"))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", + is("script_output"))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java new file mode 100644 index 000000000000..10bd4f6582cd --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -0,0 +1,238 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; 
+import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} + *
      + * Metadata configuration in {@code pubmed-integration.xml} + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl; + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Test + public void pubmedImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void pubmedImportMetadataGetRecords2Test() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = 
getClass().getResourceAsStream("pubmedimport-fetch-test2.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords2(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, + "Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review."); + MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize" + + " the main strategies for teaching clinical reasoning described in the literature in the context of" + + " advanced clinical practice and promote new areas of research to improve the pedagogical approach" + + " to clinical reasoning in Advanced Practice Nursing."); + MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and" + + " clinical thinking are essential elements in the advanced nursing clinical practice decision-making" + + " process. The quality improvement of care is related to the development of those skills." 
+ + " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical" + + " reasoning in advanced clinical practice."); + MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was" + + " conducted using the framework developed by Arksey and O'Malley as a research strategy." + + " Consistent with the nature of scoping reviews, a study protocol has been established."); + MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and" + + " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary" + + " revision studies, published in biomedical databases, were selected, including qualitative ones." + + " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID." + + " Three authors independently evaluated the articles for titles, abstracts, and full text."); + MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined," + + " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility," + + " and 27 were included in the scoping review. The results that emerged from the review were" + + " interpreted and grouped into three macro strategies (simulations-based education, art and visual" + + " thinking, and other learning approaches) and nineteen educational interventions."); + MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different" + + " strategies, the simulations are the most used. Despite this, our scoping review reveals that is" + + " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic" + + " reasoning, refine clinical judgment, and strengthen decision-making. 
However, it is not possible to" + + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to" + + " acquire an adequate level of judgment and critical thinking. Therefore, it will be" + + " necessary to relate teaching methodologies with the skills developed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "pmid", "36708638"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana"); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola"); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina"); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia"); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking"); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology"); + MetadatumDTO relationIsPartOf = createMetadatumDTO("dc", "relation", "ispartof", "Nurse education in practice"); + MetadatumDTO relationIssn = createMetadatumDTO("dc", "relation", "issn", "1873-5223"); + MetadatumDTO relationCitVolume = createMetadatumDTO("oaire", "citation", "volume", "67"); + MetadatumDTO 
doiIdentifier = createMetadatumDTO("dc", "identifier", "doi", "10.1016/j.nepr.2023.103548"); + MetadatumDTO dcType = createMetadatumDTO("dc", "type", null, "text::journal::journal article"); + MetadatumDTO dcType2 = createMetadatumDTO("dc", "type", null, "text::review"); + + metadatums.add(title); + metadatums.add(description1); + metadatums.add(description2); + metadatums.add(description3); + metadatums.add(description4); + metadatums.add(description5); + metadatums.add(description6); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(author3); + metadatums.add(author4); + metadatums.add(author5); + metadatums.add(author6); + metadatums.add(date); + metadatums.add(language); + metadatums.add(subject1); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(subject6); + metadatums.add(relationIsPartOf); + metadatums.add(relationIssn); + metadatums.add(relationCitVolume); + metadatums.add(doiIdentifier); + metadatums.add(dcType); + metadatums.add(dcType2); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + + private ArrayList getRecords2() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez."); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely" + + " used interfaces for the retrieval of information from biological databases is the NCBI Entrez" + + " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between" + + " the individual entries found in numerous public databases. 
The existence of such natural" + + " connections, mostly biological in nature, argued for the development of a method through which" + + " all the information about a particular biological entity could be found without having to" + + " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based" + + " searches, illustrating the types of information that can be retrieved through the Entrez system." + + " An alternate protocol builds upon the first basic protocol, using additional," + + " built-in features of the Entrez system, and providing alternative ways to issue the initial query." + + " The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure" + + " visualization tool, is also discussed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "pmid", "21975942"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO relationIsPartOf = + createMetadatumDTO("dc", "relation", "ispartof", "Current protocols in human genetics"); + MetadatumDTO relationIssn = createMetadatumDTO("dc", "relation", "issn", "1934-8258"); + MetadatumDTO relationCitVolume = createMetadatumDTO("oaire", "citation", "volume", "Chapter 6"); + MetadatumDTO doiIdentifier = createMetadatumDTO("dc", "identifier", "doi", "10.1002/0471142905.hg0610s71"); + MetadatumDTO dcType = createMetadatumDTO("dc", "type", null, "text::journal::journal article"); + + metadatums.add(title); + metadatums.add(description); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(date); + metadatums.add(language); + metadatums.add(relationIsPartOf); + 
metadatums.add(relationIssn); + metadatums.add(relationCitVolume); + metadatums.add(doiIdentifier); + metadatums.add(dcType); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java index d597b68a550f..4cdbb6c11061 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java @@ -7,21 +7,32 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.repository.RegistrationRestRepository.TOKEN_QUERY_PARAM; import static org.dspace.app.rest.repository.RegistrationRestRepository.TYPE_FORGOT; import static org.dspace.app.rest.repository.RegistrationRestRepository.TYPE_QUERY_PARAM; import static org.dspace.app.rest.repository.RegistrationRestRepository.TYPE_REGISTER; +import static org.hamcrest.Matchers.emptyOrNullString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.sql.SQLException; import java.util.Iterator; import java.util.List; import javax.servlet.http.HttpServletResponse; @@ -30,17 +41,30 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.matcher.RegistrationMatcher; import org.dspace.app.rest.model.RegistrationRest; +import org.dspace.app.rest.model.patch.AddOperation; +import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.repository.RegistrationRestRepository; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.AuthorizeException; import org.dspace.builder.EPersonBuilder; +import org.dspace.core.Email; import org.dspace.eperson.CaptchaServiceImpl; +import org.dspace.eperson.EPerson; import org.dspace.eperson.InvalidReCaptchaException; import org.dspace.eperson.RegistrationData; +import org.dspace.eperson.RegistrationTypeEnum; import org.dspace.eperson.dao.RegistrationDataDAO; import org.dspace.eperson.service.CaptchaService; +import org.dspace.eperson.service.RegistrationDataService; import org.dspace.services.ConfigurationService; import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.MockedStatic; +import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; public class RegistrationRestRepositoryIT extends AbstractControllerIntegrationTest { @@ -50,9 +74,31 @@ public class RegistrationRestRepositoryIT extends AbstractControllerIntegrationT @Autowired private RegistrationDataDAO registrationDataDAO; @Autowired + private RegistrationDataService registrationDataService; + @Autowired private 
ConfigurationService configurationService; @Autowired private RegistrationRestRepository registrationRestRepository; + private static MockedStatic emailMockedStatic; + + @After + public void tearDown() throws Exception { + Iterator iterator = registrationDataDAO.findAll(context, RegistrationData.class).iterator(); + while (iterator.hasNext()) { + RegistrationData registrationData = iterator.next(); + registrationDataDAO.delete(context, registrationData); + } + } + + @BeforeClass + public static void init() throws Exception { + emailMockedStatic = Mockito.mockStatic(Email.class); + } + + @AfterClass + public static void tearDownClass() throws Exception { + emailMockedStatic.close(); + } @Test public void findByTokenTestExistingUserTest() throws Exception { @@ -226,7 +272,7 @@ public void testRegisterDomainNotRegistered() throws Exception { } @Test - public void testRegisterDomainNotRegisteredMailAddressRegistred() throws Exception { + public void testRegisterMailAddressRegistered() throws Exception { List registrationDataList = registrationDataDAO.findAll(context, RegistrationData.class); try { context.turnOffAuthorisationSystem(); @@ -236,7 +282,7 @@ public void testRegisterDomainNotRegisteredMailAddressRegistred() throws Excepti .withCanLogin(true) .build(); context.restoreAuthSystemState(); - configurationService.setProperty("authentication-password.domain.valid", "test.com"); + RegistrationRest registrationRest = new RegistrationRest(); registrationRest.setEmail(email); @@ -245,9 +291,10 @@ public void testRegisterDomainNotRegisteredMailAddressRegistred() throws Excepti .param(TYPE_QUERY_PARAM, TYPE_REGISTER) .content(mapper.writeValueAsBytes(registrationRest)) .contentType(contentType)) - .andExpect(status().isUnprocessableEntity()); + .andExpect(status().isCreated()); registrationDataList = registrationDataDAO.findAll(context, RegistrationData.class); - assertEquals(0, registrationDataList.size()); + assertEquals(1, registrationDataList.size()); + 
assertTrue(StringUtils.equalsIgnoreCase(registrationDataList.get(0).getEmail(), email)); } finally { Iterator iterator = registrationDataList.iterator(); while (iterator.hasNext()) { @@ -298,6 +345,7 @@ public void registrationFlowWithNoHeaderCaptchaTokenTest() throws Exception { // when reCAPTCHA enabled and request doesn't contain "X-Recaptcha-Token” header getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) .content(mapper.writeValueAsBytes(registrationRest)) .contentType(contentType)) .andExpect(status().isForbidden()); @@ -442,4 +490,507 @@ public void accountEndpoint_WrongAccountTypeParam() throws Exception { .andExpect(status().isBadRequest()); } + @Test + public void givenRegistrationData_whenPatchInvalidValue_thenUnprocessableEntityResponse() + throws Exception { + + ObjectMapper mapper = new ObjectMapper(); + RegistrationRest registrationRest = new RegistrationRest(); + registrationRest.setEmail(eperson.getEmail()); + registrationRest.setUser(eperson.getID()); + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + // given RegistrationData with email + getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) + .content(mapper.writeValueAsBytes(registrationRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + RegistrationData registrationData = + registrationDataService.findByEmail(context, registrationRest.getEmail()); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + String token = registrationData.getToken(); + String newMail = null; + String patchContent = getPatchContent( + List.of(new ReplaceOperation("/email", newMail)) + ); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) 
+ .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().isBadRequest()); + + newMail = "test@email.com"; + patchContent = getPatchContent( + List.of(new AddOperation("/email", newMail)) + ); + + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().isUnprocessableEntity()); + + newMail = "invalidemail!!!!"; + patchContent = getPatchContent( + List.of(new ReplaceOperation("/email", newMail)) + ); + + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void givenRegistrationData_whenPatchWithInvalidToken_thenUnprocessableEntityResponse() + throws Exception { + + ObjectMapper mapper = new ObjectMapper(); + RegistrationRest registrationRest = new RegistrationRest(); + registrationRest.setEmail(eperson.getEmail()); + registrationRest.setUser(eperson.getID()); + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + // given RegistrationData with email + getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) + .content(mapper.writeValueAsBytes(registrationRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + RegistrationData registrationData = + registrationDataService.findByEmail(context, registrationRest.getEmail()); + + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + String token = null; + String newMail = "validemail@email.com"; + String patchContent = getPatchContent( + List.of(new 
ReplaceOperation("/email", newMail)) + ); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().isUnauthorized()); + + token = "notexistingtoken"; + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().isUnauthorized()); + + context.turnOffAuthorisationSystem(); + registrationData = context.reloadEntity(registrationData); + registrationDataService.markAsExpired(context, registrationData); + context.commit(); + context.restoreAuthSystemState(); + + registrationData = context.reloadEntity(registrationData); + + assertThat(registrationData.getExpires(), notNullValue()); + + token = registrationData.getToken(); + newMail = "validemail@email.com"; + patchContent = getPatchContent( + List.of(new ReplaceOperation("/email", newMail)) + ); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().isUnauthorized()); + } + + @Test + public void givenRegistrationDataWithEmail_whenPatchForReplaceEmail_thenSuccessfullResponse() + throws Exception { + + ObjectMapper mapper = new ObjectMapper(); + RegistrationRest registrationRest = new RegistrationRest(); + registrationRest.setEmail(eperson.getEmail()); + registrationRest.setUser(eperson.getID()); + + // given RegistrationData with email + getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) + .content(mapper.writeValueAsBytes(registrationRest)) + 
.contentType(contentType)) + .andExpect(status().isCreated()); + + RegistrationData registrationData = + registrationDataService.findByEmail(context, registrationRest.getEmail()); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + String token = registrationData.getToken(); + String newMail = "vincenzo.mecca@4science.com"; + String patchContent = getPatchContent( + List.of(new ReplaceOperation("/email", newMail)) + ); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().is2xxSuccessful()); + } + + @Test + public void givenRegistrationDataWithoutEmail_whenPatchForAddEmail_thenSuccessfullResponse() + throws Exception { + + RegistrationData registrationData = + createNewRegistrationData("0000-1111-2222-3333", RegistrationTypeEnum.ORCID); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + String token = registrationData.getToken(); + String newMail = "vincenzo.mecca@4science.com"; + String patchContent = getPatchContent( + List.of(new AddOperation("/email", newMail)) + ); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + // then succesful response returned + .andExpect(status().is2xxSuccessful()); + } + + @Test + public void givenRegistrationDataWithEmail_whenPatchForReplaceEmail_thenNewRegistrationDataCreated() + throws Exception { + + ObjectMapper mapper = new ObjectMapper(); + RegistrationRest registrationRest = new RegistrationRest(); + registrationRest.setEmail(eperson.getEmail()); + registrationRest.setUser(eperson.getID()); + + // given 
RegistrationData with email + getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) + .content(mapper.writeValueAsBytes(registrationRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + RegistrationData registrationData = + registrationDataService.findByEmail(context, registrationRest.getEmail()); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + String token = registrationData.getToken(); + String newMail = "vincenzo.mecca@4science.com"; + String patchContent = getPatchContent( + List.of(new ReplaceOperation("/email", newMail)) + ); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + .andExpect(status().is2xxSuccessful()); + + // then email updated with new registration + RegistrationData newRegistration = registrationDataService.findByEmail(context, newMail); + assertThat(newRegistration, notNullValue()); + assertThat(newRegistration.getToken(), not(emptyOrNullString())); + assertThat(newRegistration.getEmail(), equalTo(newMail)); + + assertThat(newRegistration.getEmail(), not(equalTo(registrationData.getEmail()))); + assertThat(newRegistration.getToken(), not(equalTo(registrationData.getToken()))); + + registrationData = context.reloadEntity(registrationData); + assertThat(registrationData, nullValue()); + } + + @Test + public void givenRegistrationDataWithoutEmail_whenPatchForReplaceEmail_thenNewRegistrationDataCreated() + throws Exception { + RegistrationData registrationData = + createNewRegistrationData("0000-1111-2222-3333", RegistrationTypeEnum.ORCID); + + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + String token = registrationData.getToken(); + String newMail = "vincenzo.mecca@4science.com"; + String patchContent = getPatchContent( + 
List.of(new AddOperation("/email", newMail)) + ); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + .andExpect(status().is2xxSuccessful()); + + // then email updated with new registration + RegistrationData newRegistration = registrationDataService.findByEmail(context, newMail); + assertThat(newRegistration, notNullValue()); + assertThat(newRegistration.getToken(), not(emptyOrNullString())); + assertThat(newRegistration.getEmail(), equalTo(newMail)); + + assertThat(newRegistration.getEmail(), not(equalTo(registrationData.getEmail()))); + assertThat(newRegistration.getToken(), not(equalTo(registrationData.getToken()))); + + registrationData = context.reloadEntity(registrationData); + assertThat(registrationData, nullValue()); + } + + @Test + public void givenRegistrationDataWithoutEmail_whenPatchForAddEmail_thenExternalLoginSent() throws Exception { + RegistrationData registrationData = + createNewRegistrationData("0000-1111-2222-3333", RegistrationTypeEnum.ORCID); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + String token = registrationData.getToken(); + String newMail = "vincenzo.mecca@4science.com"; + String patchContent = getPatchContent( + List.of(new AddOperation("/email", newMail)) + ); + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + .andExpect(status().is2xxSuccessful()); + + // then verification email sent + verify(spy, times(1)).addRecipient(newMail); + verify(spy).addArgument( + ArgumentMatchers.contains( + 
RegistrationTypeEnum.ORCID.getLink() + ) + ); + verify(spy, times(1)).send(); + } + + @Test + public void givenRegistrationDataWithEmail_whenPatchForNewEmail_thenExternalLoginSent() throws Exception { + RegistrationData registrationData = + createNewRegistrationData("0000-1111-2222-3333", RegistrationTypeEnum.ORCID); + + String token = registrationData.getToken(); + String newMail = "vincenzo.mecca@orcid.com"; + String patchContent = getPatchContent( + List.of(new AddOperation("/email", newMail)) + ); + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + .andExpect(status().is2xxSuccessful()); + + verify(spy, times(1)).addRecipient(newMail); + verify(spy).addArgument( + ArgumentMatchers.contains( + registrationData.getRegistrationType().getLink() + ) + ); + verify(spy, times(1)).send(); + + registrationData = registrationDataService.findByEmail(context, newMail); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + token = registrationData.getToken(); + newMail = "vincenzo.mecca@4science.com"; + patchContent = getPatchContent( + List.of(new ReplaceOperation("/email", newMail)) + ); + + spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + .andExpect(status().is2xxSuccessful()); + + // then verification email sent + verify(spy, times(1)).addRecipient(newMail); + verify(spy).addArgument( + 
ArgumentMatchers.contains( + registrationData.getRegistrationType().getLink() + ) + ); + verify(spy, times(1)).send(); + } + + @Test + public void givenRegistrationDataWithEmail_whenPatchForExistingEPersonEmail_thenReviewAccountLinkSent() + throws Exception { + ObjectMapper mapper = new ObjectMapper(); + RegistrationRest registrationRest = new RegistrationRest(); + registrationRest.setEmail(eperson.getEmail()); + registrationRest.setNetId("0000-0000-0000-0000"); + + // given RegistrationData with email + getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) + .content(mapper.writeValueAsBytes(registrationRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + RegistrationData registrationData = + registrationDataService.findByEmail(context, registrationRest.getEmail()); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + context.turnOffAuthorisationSystem(); + final EPerson vins = + EPersonBuilder.createEPerson(context) + .withEmail("vincenzo.mecca@4science.com") + .withNameInMetadata("Vincenzo", "Mecca") + .withOrcid("0101-0101-0101-0101") + .build(); + context.restoreAuthSystemState(); + + String token = registrationData.getToken(); + String vinsEmail = vins.getEmail(); + String patchContent = getPatchContent( + List.of(new ReplaceOperation("/email", vins.getEmail())) + ); + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + .andExpect(status().is2xxSuccessful()); + + // then verification email sent + verify(spy, times(1)).addRecipient(vinsEmail); + verify(spy).addArgument( + ArgumentMatchers.contains( + 
RegistrationTypeEnum.VALIDATION_ORCID.getLink() + ) + ); + verify(spy, times(1)).send(); + } + + @Test + public void givenRegistrationDataWithoutEmail_whenPatchForExistingAccount_thenReviewAccountSent() throws Exception { + RegistrationData registrationData = + createNewRegistrationData("0000-1111-2222-3333", RegistrationTypeEnum.ORCID); + + assertThat(registrationData, notNullValue()); + assertThat(registrationData.getToken(), not(emptyOrNullString())); + + context.turnOffAuthorisationSystem(); + final EPerson vins = + EPersonBuilder.createEPerson(context) + .withEmail("vincenzo.mecca@4science.com") + .withNameInMetadata("Vincenzo", "Mecca") + .withOrcid("0101-0101-0101-0101") + .build(); + context.commit(); + context.restoreAuthSystemState(); + + String token = registrationData.getToken(); + String vinsEmail = vins.getEmail(); + String patchContent = getPatchContent( + List.of(new AddOperation("/email", vins.getEmail())) + ); + + Email spy = Mockito.spy(Email.class); + doNothing().when(spy).send(); + + emailMockedStatic.when(() -> Email.getEmail(any())).thenReturn(spy); + + // when patch for replace email + getClient().perform(patch("/api/eperson/registrations/" + registrationData.getID()) + .param(TOKEN_QUERY_PARAM, token) + .content(patchContent) + .contentType(contentType)) + .andExpect(status().is2xxSuccessful()); + + // then verification email sent + verify(spy, times(1)).addRecipient(vinsEmail); + verify(spy).addArgument( + ArgumentMatchers.contains( + RegistrationTypeEnum.VALIDATION_ORCID.getLink() + ) + ); + verify(spy, times(1)).send(); + } + + + private RegistrationData createNewRegistrationData( + String netId, RegistrationTypeEnum type + ) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + RegistrationData registrationData = + registrationDataService.create(context, netId, type); + context.commit(); + context.restoreAuthSystemState(); + return registrationData; + } + } diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index da48a5adf43b..cbaca4707b13 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -8,14 +8,12 @@ package org.dspace.app.rest; import static com.jayway.jsonpath.JsonPath.read; -import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.exparity.hamcrest.date.DateMatchers.within; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.text.IsEmptyString.emptyOrNullString; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; @@ -34,6 +32,7 @@ import java.sql.SQLException; import java.time.temporal.ChronoUnit; import java.util.Date; +import java.util.Iterator; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; @@ -221,33 +220,34 @@ public void testCreateAndReturnAuthenticated() // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - AtomicReference requestTokenRef = new AtomicReference<>(); try { - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(eperson.getEmail())), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(eperson.getFullName())), - hasJsonPath("$.allfiles", is(true)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); } finally { - // Clean up the created request. 
- RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(eperson.getEmail())) { + // Verify request data + assertEquals(eperson.getFullName(), requestItem.getReqName()); + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(true, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); } - } +} /** * Test of createAndReturn method, with an UNauthenticated user. @@ -273,30 +273,32 @@ public void testCreateAndReturnNotAuthenticated() // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); - AtomicReference requestTokenRef = new AtomicReference<>(); try { - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(RequestItemBuilder.REQ_EMAIL)), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(RequestItemBuilder.REQ_NAME)), - hasJsonPath("$.allfiles", is(false)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - 
read(result.getResponse().getContentAsString(), "token"))); + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(RequestItemBuilder.REQ_EMAIL)) { + // Verify request data + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(RequestItemBuilder.REQ_NAME, requestItem.getReqName()); + assertEquals(bitstream.getID(), requestItem.getBitstream().getID()); + assertEquals(false, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorImportMetadataSourceServiceIT.java new file mode 100644 index 000000000000..4f8e56f98054 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorImportMetadataSourceServiceIT.java @@ -0,0 +1,137 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.hamcrest.MatcherAssert.assertThat; +import static 
org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.Collection; +import java.util.Optional; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.ror.service.RorImportMetadataSourceServiceImpl; +import org.hamcrest.Matcher; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +public class RorImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private LiveImportClientImpl liveImportClient; + + @Autowired + private RorImportMetadataSourceServiceImpl rorServiceImpl; + + @Test + public void tesGetRecords() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ror-records.json")) { + + String jsonResponse = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(jsonResponse, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + Collection recordsImported = rorServiceImpl.getRecords("test query", 0, 2); + assertThat(recordsImported, hasSize(10)); + + ImportRecord record = recordsImported.iterator().next(); + + assertThat(record.getValueList(), hasSize(11)); + + assertThat(record.getSingleValue("dc.title"), is("The University of 
Texas")); + assertThat(record.getSingleValue("organization.identifier.ror"), is("https://ror.org/02f6dcw23")); + assertThat(record.getSingleValue("oairecerif.acronym"), is("UTHSCSA")); + assertThat(record.getSingleValue("oairecerif.identifier.url"), is("http://www.uthscsa.edu/")); + assertThat(record.getSingleValue("dc.type"), is("Education")); + assertThat(record.getSingleValue("organization.address.addressCountry"), is("US")); + assertThat(record.getSingleValue("organization.foundingDate"), is("1959")); + assertThat(record.getValue("organization", "identifier", "crossrefid"), hasSize(2)); + assertThat(record.getSingleValue("organization.identifier.isni"), is("0000 0001 0629 5880")); + assertThat(record.getSingleValue("organization.parentOrganization"), is("The University of Texas System")); + + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test + public void tesCount() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ror-records.json")) { + + String jsonResponse = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(jsonResponse, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + Integer count = rorServiceImpl.count("test"); + assertThat(count, equalTo(200)); + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test + public void tesGetRecord() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = 
getClass().getResourceAsStream("ror-record.json")) { + + String jsonResponse = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(jsonResponse, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ImportRecord record = rorServiceImpl.getRecord("https://ror.org/01sps7q28"); + assertThat(record.getValueList(), hasSize(9)); + assertThat(record.getSingleValue("dc.title"), is("The University of Texas Health Science Center at Tyler")); + assertThat(record.getSingleValue("organization.identifier.ror"), is("https://ror.org/01sps7q28")); + assertThat(record.getSingleValue("oairecerif.acronym"), is("UTHSCT")); + assertThat(record.getSingleValue("oairecerif.identifier.url"), + is("https://www.utsystem.edu/institutions/university-texas-health-science-center-tyler")); + assertThat(record.getSingleValue("dc.type"), is("Healthcare")); + assertThat(record.getSingleValue("organization.address.addressCountry"), is("US")); + assertThat(record.getSingleValue("organization.foundingDate"), is("1947")); + assertThat(record.getSingleValue("organization.identifier.isni"), is("0000 0000 9704 5790")); + assertThat(record.getSingleValue("organization.parentOrganization"), is("The University of Texas System")); + + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + private Matcher> is(String value) { + return matches(optionalValue -> optionalValue.isPresent() && optionalValue.get().equals(value)); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorOrgUnitAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorOrgUnitAuthorityIT.java new file mode 100644 index 000000000000..e9a42e78ec38 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorOrgUnitAuthorityIT.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the 
license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.matcher.ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.util.HashMap; +import java.util.Map; + +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.junit.Test; + +public class RorOrgUnitAuthorityIT extends AbstractControllerIntegrationTest { + + @Test + public void testAuthority() throws Exception { + + Map expectedExtras = new HashMap<>(); + expectedExtras.put("data-ror_orgunit_id", "https://ror.org/02z02cv32"); + expectedExtras.put("ror_orgunit_id", "https://ror.org/02z02cv32"); + expectedExtras.put("data-ror_orgunit_type", "Nonprofit"); + expectedExtras.put("ror_orgunit_type", "Nonprofit"); + expectedExtras.put("data-ror_orgunit_acronym", "WEICan, IEEC"); + expectedExtras.put("ror_orgunit_acronym", "WEICan, IEEC"); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/OrgUnitAuthority/entries") + .param("filter", "test")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", hasSize(10))) + .andExpect(jsonPath("$._embedded.entries", + hasItem(matchItemAuthorityWithOtherInformations("will be referenced::ROR-ID::https://ror.org/02z02cv32", + "Wind Energy Institute of Canada", "Wind Energy Institute of Canada", "vocabularyEntry", + expectedExtras)))); + } + +} diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 4c6cfae1334a..8e6a9d28b590 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -15,6 +15,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -53,6 +54,7 @@ import org.dspace.app.rest.model.ParameterValueRest; import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; @@ -115,7 +117,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { private ChoiceAuthorityService choiceAuthorityService; @After - public void after() { + public void after() throws SubmissionConfigReaderException { DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); choiceAuthorityService.clearCache(); @@ -321,14 +323,73 @@ public void findAllScriptsSortedAlphabeticallyTest() throws Exception { } @Test - public void findAllScriptsWithNoAdminTest() throws Exception { + public void findAllScriptsGenericLoggedInUserTest() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/system/scripts")) .andExpect(status().isOk()) .andExpect(jsonPath("$.page", - 
is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 4)))); + is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 2)))); + } + + @Test + public void findAllScriptsAnonymousUserTest() throws Exception { + // this should be changed once we allow anonymous user to execute some scripts + getClient().perform(get("/api/system/scripts")) + .andExpect(status().isOk()); + } + @Test + public void findAllScriptsLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + // the local admins have at least access to the curate script + // and not access to process-cleaner script + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + 
.andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); } @Test @@ -374,14 +435,22 @@ public void findAllScriptsPaginationTest() throws Exception { getClient(token).perform(get("/api/system/scripts").param("size", "1").param("page", "1")) .andExpect(status().isOk()) + .andExpect( + jsonPath("$._embedded.scripts", + not( + hasItem( + ScriptMatcher.matchScript( + scriptConfigurations.get(10).getName(), + scriptConfigurations.get(10).getDescription() + ) + ) + ) + ) + ) .andExpect(jsonPath("$._embedded.scripts", hasItem( - ScriptMatcher.matchScript(scriptConfigurations.get(10).getName(), - scriptConfigurations.get(10).getDescription()) + ScriptMatcher.matchScript(alphabeticScripts.get(1).getName(), + alphabeticScripts.get(1).getDescription()) ))) - .andExpect(jsonPath("$._embedded.scripts", Matchers.not(hasItem( - ScriptMatcher.matchScript(alphabeticScripts.get(0).getName(), - alphabeticScripts.get(0).getDescription()) - )))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/system/scripts?"), Matchers.containsString("page=0"), 
Matchers.containsString("size=1")))) @@ -422,6 +491,63 @@ public void findOneScriptByNameTest() throws Exception { )); } + @Test + public void findOneScriptByNameLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(colAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + 
curateScriptConfiguration.getDescription()))); + getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + } + + @Test + public void findOneScriptByNameNotAuthenticatedTest() throws Exception { + getClient().perform(get("/api/system/scripts/mock-script")) + .andExpect(status().isUnauthorized()); + } + @Test public void findOneScriptByNameTestAccessDenied() throws Exception { String token = getAuthToken(eperson.getEmail(), password); @@ -433,15 +559,51 @@ public void findOneScriptByNameTestAccessDenied() throws Exception { @Test public void findOneScriptByInvalidNameBadRequestExceptionTest() throws Exception { getClient().perform(get("/api/system/scripts/mock-script-invalid")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins, nor generic users can run scripts reserved to administrator + * (i.e. 
default one that don't override the default + * {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation + */ @Test public void postProcessNonAdminAuthorizeException() throws Exception { - String token = getAuthToken(eperson.getEmail(), password); + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + String comAdmin_token = getAuthToken(eperson.getEmail(), password); + String colAdmin_token = getAuthToken(eperson.getEmail(), password); + String itemAdmin_token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script/processes")) .andExpect(status().isForbidden()); + getClient(comAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(colAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(itemAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); } @Test @@ -475,16 +637,6 @@ public void 
postProcessAdminWrongOptionsException() throws Exception { @Test public void postProcessAdminNoOptionsFailedStatus() throws Exception { -// List list = new LinkedList<>(); -// -// ParameterValueRest parameterValueRest = new ParameterValueRest(); -// parameterValueRest.setName("-z"); -// parameterValueRest.setValue("test"); -// ParameterValueRest parameterValueRest1 = new ParameterValueRest(); -// parameterValueRest1.setName("-q"); -// list.add(parameterValueRest); -// list.add(parameterValueRest1); - LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-z", "test")); @@ -520,7 +672,7 @@ public void postProcessNonExistingScriptNameException() throws Exception { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -631,12 +783,19 @@ public void postProcessAndVerifyOutput() throws Exception { } + + @Test public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(post("/api/system/scripts/mock-script/processes")) + .andExpect(status().isBadRequest()); + getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -1412,6 +1571,7 @@ private void checkExportOutput( @Override @After public void destroy() throws Exception { + context.turnOffAuthorisationSystem(); CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> { try { processService.delete(context, process); @@ -1419,6 +1579,7 @@ public void destroy() throws Exception { throw new RuntimeException(e); } }); + context.restoreAuthSystemState(); super.destroy(); } diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java index bd40cfdc9dd8..978d8feb58b9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java @@ -411,4 +411,114 @@ public void postTestSuccesEmptyQuery() throws Exception { .andExpect(status().isCreated()); } + + @Test + public void postTestWithClickedObjectSuccess() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. 
Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(publicItem1.getID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + } + + @Test + public void postTestWithClickedObjectNotExisting() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. 
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(UUID.randomUUID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isBadRequest()); + + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java index cbcf970547f7..175fb34e6cac 100644 
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.builder.ItemBuilder.createItem; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -16,6 +17,7 @@ import javax.servlet.ServletException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.content.Collection; @@ -38,10 +40,22 @@ public class SitemapRestControllerIT extends AbstractControllerIntegrationTest { @Autowired ConfigurationService configurationService; + @Autowired + ResourcePolicyService policyService; + private final static String SITEMAPS_ENDPOINT = "sitemaps"; private Item item1; private Item item2; + private Item itemRestricted; + private Item itemUndiscoverable; + private Item entityPublication; + private Item entityPublicationRestricted; + private Item entityPublicationUndiscoverable; + private Community community; + private Community communityRestricted; + private Collection collection; + private Collection collectionRestricted; @Before @Override @@ -52,8 +66,16 @@ public void setUp() throws Exception { context.turnOffAuthorisationSystem(); - Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); + community = CommunityBuilder.createCommunity(context).build(); + communityRestricted = CommunityBuilder.createCommunity(context).build(); + policyService.removeAllPolicies(context, communityRestricted); + 
collection = CollectionBuilder.createCollection(context, community).build(); + collectionRestricted = CollectionBuilder.createCollection(context, community).build(); + Collection publicationCollection = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .withName("Publication Collection").build(); + policyService.removeAllPolicies(context, collectionRestricted); + this.item1 = createItem(context, collection) .withTitle("Test 1") .withIssueDate("2010-10-17") @@ -62,6 +84,30 @@ public void setUp() throws Exception { .withTitle("Test 2") .withIssueDate("2015-8-3") .build(); + this.itemRestricted = createItem(context, collection) + .withTitle("Test 3") + .withIssueDate("2015-8-3") + .build(); + policyService.removeAllPolicies(context, itemRestricted); + this.itemUndiscoverable = createItem(context, collection) + .withTitle("Test 4") + .withIssueDate("2015-8-3") + .makeUnDiscoverable() + .build(); + this.entityPublication = createItem(context, publicationCollection) + .withTitle("Item Publication") + .withIssueDate("2015-8-3") + .build(); + this.entityPublicationRestricted = createItem(context, publicationCollection) + .withTitle("Item Publication Restricted") + .withIssueDate("2015-8-3") + .build(); + policyService.removeAllPolicies(context, entityPublicationRestricted); + this.entityPublicationUndiscoverable = createItem(context, publicationCollection) + .withTitle("Item Publication") + .withIssueDate("2015-8-3") + .makeUnDiscoverable() + .build(); runDSpaceScript("generate-sitemaps"); @@ -127,9 +173,39 @@ public void testSitemap_sitemap0Html() throws Exception { .andReturn(); String response = result.getResponse().getContentAsString(); + // contains a link to communities: [dspace.ui.url]/communities/ + assertTrue(response + .contains(configurationService.getProperty("dspace.ui.url") + "/communities/" + community.getID())); + // contains a link to collections: [dspace.ui.url]/collections/ + assertTrue(response + 
.contains(configurationService.getProperty("dspace.ui.url") + "/collections/" + collection.getID())); // contains a link to items: [dspace.ui.url]/items/ assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item1.getID())); assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item2.getID())); + // contains proper link to entities items + assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublication.getID())); + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + entityPublication.getID())); + // does not contain links to restricted content + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/communities/" + communityRestricted.getID())); + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/collections/" + collectionRestricted.getID())); + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + itemRestricted.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublicationRestricted.getID())); + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/items/" + entityPublicationRestricted.getID())); + // does not contain links to undiscoverable content + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + itemUndiscoverable.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublicationUndiscoverable.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + + entityPublicationUndiscoverable.getID())); + } @Test @@ -160,8 +236,37 @@ public void testSitemap_sitemap0Xml() throws Exception { 
.andReturn(); String response = result.getResponse().getContentAsString(); + // contains a link to communities: [dspace.ui.url]/communities/ + assertTrue(response + .contains(configurationService.getProperty("dspace.ui.url") + "/communities/" + community.getID())); + // contains a link to collections: [dspace.ui.url]/collections/ + assertTrue(response + .contains(configurationService.getProperty("dspace.ui.url") + "/collections/" + collection.getID())); // contains a link to items: [dspace.ui.url]/items/ assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item1.getID())); assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item2.getID())); + // contains proper link to entities items + assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublication.getID())); + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + entityPublication.getID())); + // does not contain links to restricted content + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/communities/" + communityRestricted.getID())); + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/collections/" + collectionRestricted.getID())); + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + itemRestricted.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublicationRestricted.getID())); + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/items/" + entityPublicationRestricted.getID())); + // does not contain links to undiscoverable content + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + itemUndiscoverable.getID())); + 
assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublicationUndiscoverable.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + + entityPublicationUndiscoverable.getID())); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java index 0f7996a765f3..adf2ea830613 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java @@ -25,24 +25,28 @@ import static org.dspace.app.rest.utils.UsageReportUtils.TOP_DOWNLOAD_CITIES_REPORT_ID; import static org.dspace.app.rest.utils.UsageReportUtils.TOP_DOWNLOAD_CONTINENTS_REPORT_ID; import static org.dspace.app.rest.utils.UsageReportUtils.TOP_DOWNLOAD_COUNTRIES_REPORT_ID; +import static org.dspace.app.rest.utils.UsageReportUtils.TOP_ITEMS_CATEGORIES_REPORT_ID; +import static org.dspace.app.rest.utils.UsageReportUtils.TOP_ITEMS_CITIES_REPORT_ID; +import static org.dspace.app.rest.utils.UsageReportUtils.TOP_ITEMS_CONTINENTS_REPORT_ID; +import static org.dspace.app.rest.utils.UsageReportUtils.TOP_ITEMS_COUNTRIES_REPORT_ID; import static org.dspace.app.rest.utils.UsageReportUtils.TOP_ITEMS_REPORT_ID; import static org.dspace.app.rest.utils.UsageReportUtils.TOP_ITEMS_REPORT_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_DOWNLOADS_REPORT_ID; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_DOWNLOADS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_DOWNLOAD_PER_MONTH_REPORT_ID; -//import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_DOWNLOADS_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS; +import static 
org.dspace.app.rest.utils.UsageReportUtils.TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID; +import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_ITEMS_VISITS_REPORT_ID; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_PER_MONTH_REPORT_ID; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_PROJECTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_REPORT_ID; -//import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_REPORT_ID_RELATION_ORGUNIT_PROJECTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_REPORT_ID_RELATION_PERSON_PROJECTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_TOTAL_DOWNLOADS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS; -//import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_PERSON_RESEARCHOUTPUTS; +import static org.dspace.util.FunctionalUtils.throwingConsumerWrapper; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.not; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; @@ -61,9 +65,12 @@ import java.util.Calendar; import java.util.Collections; import java.util.Date; +import java.util.LinkedList; import java.util.List; import java.util.Locale; +import java.util.Queue; import java.util.UUID; +import java.util.function.Consumer; import 
com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; @@ -97,6 +104,9 @@ import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; +import org.dspace.services.EventService; +import org.dspace.services.model.Event; +import org.dspace.services.model.EventListener; import org.dspace.statistics.factory.StatisticsServiceFactory; import org.dspace.util.MultiFormatDateParser; import org.hamcrest.Matchers; @@ -114,10 +124,14 @@ */ public class StatisticsRestRepositoryIT extends AbstractControllerIntegrationTest { + protected final StatisticsEventListener statisticsEventListener = new StatisticsEventListener(); + @Autowired ConfigurationService configurationService; @Autowired protected AuthorizeService authorizeService; + @Autowired + protected EventService eventService; private Community communityNotVisited; private Community communityVisited; @@ -162,7 +176,9 @@ public void setUp() throws Exception { itemVisited = ItemBuilder.createItem(context, collectionNotVisited).build(); itemNotVisitedWithBitstreams = ItemBuilder.createItem(context, collectionNotVisited).build(); bitstreamNotVisited = BitstreamBuilder.createBitstream(context, - itemNotVisitedWithBitstreams, toInputStream("test", UTF_8)).withName("BitstreamNotVisitedName").build(); + itemNotVisitedWithBitstreams, + toInputStream("test", UTF_8)) + .withName("BitstreamNotVisitedName").build(); bitstreamVisited = BitstreamBuilder .createBitstream(context, itemNotVisitedWithBitstreams, toInputStream("test", UTF_8)) .withName("BitstreamVisitedName").build(); @@ -194,20 +210,22 @@ public void setUp() throws Exception { .build(); //bitstream for first publication of person bitstreampublication_first = BitstreamBuilder - .createBitstream(context, publicationVisited1, - toInputStream("test", UTF_8)) - .withName("bitstream1") + .createBitstream(context, publicationVisited1, + toInputStream("test", UTF_8)) + .withName("bitstream1") 
.build(); //bitstream for second publication of person bitstreampublication_second = BitstreamBuilder - .createBitstream(context, publicationVisited2, - toInputStream("test", UTF_8)) - .withName("bitstream2") + .createBitstream(context, publicationVisited2, + toInputStream("test", UTF_8)) + .withName("bitstream2") .build(); loggedInToken = getAuthToken(eperson.getEmail(), password); adminToken = getAuthToken(admin.getEmail(), password); + this.eventService.registerEventListener(this.statisticsEventListener); + context.restoreAuthSystemState(); } @@ -220,26 +238,26 @@ public void usagereports_withoutId_NotImplementedException() throws Exception { @Test public void usagereports_notProperUUIDAndReportId_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/notProperUUIDAndReportId")) - .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); + .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); } @Test public void usagereports_nonValidUUIDpart_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/notAnUUID" + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); + .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); } @Test public void usagereports_nonValidReportIDpart_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_NotValidReport")) - .andExpect(status().is(HttpStatus.NOT_FOUND.value())); + "_NotValidReport")) + .andExpect(status().is(HttpStatus.NOT_FOUND.value())); } @Test public void usagereports_nonValidReportIDpart_Exception_By_Anonymous_Unauthorized_Test() throws Exception { getClient().perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_NotValidReport")) + "_NotValidReport")) .andExpect(status().isUnauthorized()); } @@ -247,15 +265,15 @@ public void 
usagereports_nonValidReportIDpart_Exception_By_Anonymous_Unauthorize public void usagereports_nonValidReportIDpart_Exception_By_Anonymous_Test() throws Exception { configurationService.setProperty("usage-statistics.authorization.admin.usage", false); getClient().perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_NotValidReport")) + "_NotValidReport")) .andExpect(status().isNotFound()); } @Test public void usagereports_NonExistentUUID_Exception() throws Exception { getClient(adminToken).perform( - get("/api/statistics/usagereports/" + UUID.randomUUID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().is(HttpStatus.NOT_FOUND.value())); + get("/api/statistics/usagereports/" + UUID.randomUUID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().is(HttpStatus.NOT_FOUND.value())); } @Test @@ -272,7 +290,7 @@ public void usagereport_onlyAdminReadRights() throws Exception { // We request a dso's TotalVisits usage stat report as admin getClient(adminToken).perform( get("/api/statistics/usagereports/" + - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) // ** THEN ** .andExpect(status().isOk()); } @@ -284,8 +302,8 @@ public void usagereport_onlyAdminReadRights_unvalidToken() throws Exception { // We request a dso's TotalVisits usage stat report with unvalid token getClient("unvalidToken").perform( get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isUnauthorized()); + // ** THEN ** + .andExpect(status().isUnauthorized()); } @Test @@ -307,19 +325,19 @@ public void usagereport_loggedInUserReadRights() throws Exception { // We request a dso's TotalVisits usage stat report as anon but dso has no read policy for anon getClient().perform( get("/api/statistics/usagereports/" + - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + 
itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) // ** THEN ** .andExpect(status().isUnauthorized()); // We request a dso's TotalVisits usage stat report as logged in eperson and has read policy for this user getClient(loggedInToken).perform( get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isForbidden()); + // ** THEN ** + .andExpect(status().isForbidden()); // We request a dso's TotalVisits usage stat report as another logged in eperson and has no read policy for // this user getClient(anotherLoggedInUserToken).perform( get("/api/statistics/usagereports/" + - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) // ** THEN ** .andExpect(status().isForbidden()); } @@ -342,19 +360,19 @@ public void usagereport_loggedInUserReadRights_and_usage_statistics_admin_is_fal String anotherLoggedInUserToken = getAuthToken(eperson1.getEmail(), password); // We request a dso's TotalVisits usage stat report as anon but dso has no read policy for anon getClient().perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_VISITS_REPORT_ID)) + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isUnauthorized()); // We request a dso's TotalVisits usage stat report as logged in eperson and has read policy for this user getClient(loggedInToken).perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_" + TOTAL_VISITS_REPORT_ID)) + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isOk()); // We request a dso's TotalVisits usage stat report as another logged // in eperson and has no read policy for this user getClient(anotherLoggedInUserToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + 
get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); } @Test @@ -365,27 +383,30 @@ public void totalVisitsReport_Community_Visited() throws Exception { viewEventRest.setTargetType("community"); viewEventRest.setTargetId(communityVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that community's TotalVisits stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(communityVisited, 1) + ) + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that community's TotalVisits stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(communityVisited, 1) - ) - ) - ))); } @Test @@ -394,18 +415,18 @@ public void totalVisitsReport_Community_NotVisited() throws Exception { // Community is never visited // And request that community's TotalVisits stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - 
UsageReportMatcher.matchUsageReport( - communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(communityNotVisited, 0) - ) - ) - ))); + get("/api/statistics/usagereports/" + communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(communityNotVisited, 0) + ) + ) + ))); } @Test @@ -423,25 +444,30 @@ public void totalVisitsReport_Collection_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TotalVisits stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(collectionVisited, 2) + ) + ) + ))); + })); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that collection's TotalVisits stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(collectionVisited, 2) - ) - ) - ))); } @Test @@ 
-450,22 +476,43 @@ public void totalVisitsReport_Collection_NotVisited() throws Exception { // Collection is never visited // And request that collection's TotalVisits stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(collectionNotVisited, 0) - ) - ) - ))); + get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(collectionNotVisited, 0) + ) + ) + ))); } @Test public void totalVisitsReport_Item_Visited() throws Exception { + + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TotalVisits stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(itemVisited, 1) + ) + ) + ))); + })); + // ** WHEN ** // We visit an Item ViewEventRest viewEventRest = new ViewEventRest(); @@ -478,21 +525,6 @@ public void totalVisitsReport_Item_Visited() throws Exception { .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that collection's TotalVisits stat report - 
getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(itemVisited, 1) - ) - ) - ))); } @Test @@ -506,8 +538,8 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { // And request that item's TotalVisits stat report getClient(adminToken).perform( get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) + // ** THEN ** + .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.is( UsageReportMatcher.matchUsageReport( itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID, @@ -518,11 +550,11 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { // only admin access visits report getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); getClient().perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isUnauthorized()); // make statistics visible to all @@ -530,7 +562,7 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { getClient(loggedInToken).perform( get("/api/statistics/usagereports/" - + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) 
.andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.is( UsageReportMatcher.matchUsageReport( @@ -540,9 +572,9 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { ) ))); - getClient().perform( + getClient().perform( get("/api/statistics/usagereports/" - + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.is( UsageReportMatcher.matchUsageReport( @@ -568,31 +600,31 @@ public void totalVisitsReport_Bitstream_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(1000); List expectedPoints = List.of( getExpectedDsoViews(bitstreamVisited, 1) ); // And request that bitstream's TotalVisits stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - expectedPoints - ) - ))); - + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + expectedPoints + ) + ))); // only admin access visits report getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); getClient().perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) 
- .andExpect(status().isUnauthorized()); + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isUnauthorized()); // make statistics visible to all configurationService.setProperty("usage-statistics.authorization.admin.usage", false); @@ -632,24 +664,24 @@ public void totalVisitsReport_Bitstream_NotVisited() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); // And request that bitstream's TotalVisits stat report getClient(authToken).perform( - get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - expectedPoints - ) - ))); + get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + expectedPoints + ) + ))); String tokenEPerson = getAuthToken(eperson.getEmail(), password); getClient(tokenEPerson).perform( - get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); getClient().perform( - get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isUnauthorized()); // make statistics visible to all @@ -666,7 +698,7 @@ public void totalVisitsReport_Bitstream_NotVisited() throws Exception { ) ))); - 
getClient().perform( + getClient().perform( get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.is( @@ -680,6 +712,73 @@ public void totalVisitsReport_Bitstream_NotVisited() throws Exception { @Test public void totalVisitsPerMonthReport_Item_Visited() throws Exception { + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = getLastMonthVisitPoints(1); + + // And request that item's TotalVisitsPerMonth stat report + getClient(adminToken).perform( + get( + "/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID + ) + ) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + expectedPoints + ) + ))); + + // only admin has access + getClient(loggedInToken).perform( + get( + "/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID + ) + ) + .andExpect(status().isForbidden()); + + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) + .andExpect(status().isUnauthorized()); + + // make statistics visible to all + configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + + getClient(loggedInToken).perform( + get( + "/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID + ) + ) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + expectedPoints + ) + ))); + + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + + "_" + 
TOTAL_VISITS_PER_MONTH_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + expectedPoints + ) + ))); + })); + // ** WHEN ** // We visit an Item ViewEventRest viewEventRest = new ViewEventRest(); @@ -691,56 +790,7 @@ public void totalVisitsPerMonthReport_Item_Visited() throws Exception { getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - .andExpect(status().isCreated()); - - List expectedPoints = getLastMonthVisitPoints(1); - - // And request that item's TotalVisitsPerMonth stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isForbidden()); - - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isUnauthorized()); - - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - 
expectedPoints - ) - ))); - - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - expectedPoints - ) - ))); + .andExpect(status().isCreated()); } @Test @@ -749,17 +799,17 @@ public void totalVisitsPerMonthReport_Item_NotVisited() throws Exception { // Item is not visited // And request that item's TotalVisitsPerMonth stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(0) - ) - ))); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + + TOTAL_VISITS_PER_MONTH_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(0) + ) + ))); } @Test @@ -777,23 +827,29 @@ public void totalVisitsPerMonthReport_Collection_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(3000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TotalVisitsPerMonth stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + + TOTAL_VISITS_PER_MONTH_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(2) + ) + ))); + })); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that collection's TotalVisitsPerMonth stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(2) - ) - ))); } @Test @@ -804,67 +860,100 @@ public void TotalDownloadsReport_Bitstream() throws Exception { viewEventRest.setTargetType("bitstream"); viewEventRest.setTargetId(bitstreamVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = List.of( + getExpectedDsoViews(bitstreamVisited, 1) + ); + + // And request that bitstreams's TotalDownloads stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedPoints + ) + ))); + + // only admin has access to downloads report + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isForbidden()); + + getClient().perform( + get("/api/statistics/usagereports/" + 
bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isUnauthorized()); + + // make statistics visible to all + configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedPoints + ) + ))); + + getClient().perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedPoints + ) + ))); + })); ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - List expectedPoints = List.of( - getExpectedDsoViews(bitstreamVisited, 1) - ); - - // And request that bitstreams's TotalDownloads stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access to downloads report - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isForbidden()); - - getClient().perform( - get("/api/statistics/usagereports/" + 
bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isUnauthorized()); - - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedPoints - ) - ))); - - getClient().perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedPoints - ) - ))); } @Test public void TotalDownloadsReport_Item() throws Exception { + + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPoint = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint.addValue("views", 1); + expectedPoint.setId(bitstreamVisited.getID().toString()); + expectedPoint.setLabel("BitstreamVisitedName"); + expectedPoint.setType("bitstream"); + + // And request that item's TotalDownloads stat report + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/" + + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID + ) + ) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of( + getExpectedDsoViews(bitstreamVisited, 1) + ) + ) + ))); + })); + // ** WHEN ** // We visit an Item's bitstream ViewEventRest 
viewEventRest = new ViewEventRest(); @@ -877,28 +966,6 @@ public void TotalDownloadsReport_Item() throws Exception { .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPoint = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint.addValue("views", 1); - expectedPoint.setId(bitstreamVisited.getID().toString()); - expectedPoint.setLabel("BitstreamVisitedName"); - expectedPoint.setType("bitstream"); - - // And request that item's TotalDownloads stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_DOWNLOADS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of( - getExpectedDsoViews(bitstreamVisited, 1) - ) - ) - ))); } @Test @@ -907,24 +974,17 @@ public void TotalDownloadsReport_Item_NotVisited() throws Exception { // You don't visit an item's bitstreams // And request that item's TotalDownloads stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_DOWNLOADS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of() - ) - ))); - } - - @Test - public void TotalDownloadsReport_NotSupportedDSO_Collection() throws Exception { - getClient(adminToken) - .perform(get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isNotFound()); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + + TOTAL_DOWNLOADS_REPORT_ID)) + // ** 
THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of() + ) + ))); } /** @@ -938,64 +998,72 @@ public void topCountriesReport_Collection_Visited() throws Exception { viewEventRest.setTargetType("collection"); viewEventRest.setTargetId(collectionVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = List.of( + getExpectedCountryViews(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale()), + 1)); + + // And request that collection's TopCountries report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + expectedPoints + ) + ))); + + // only admin has access to countries report + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isForbidden()); + + getClient().perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isUnauthorized()); + + // make statistics visible to all + configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + expectedPoints + ) + ))); + + 
getClient().perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + expectedPoints + ) + ))); + })); ObjectMapper mapper = new ObjectMapper(); - getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - .andExpect(status().isCreated()); - - List expectedPoints = List.of( - getExpectedCountryViews(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale()), - 1)); - - // And request that collection's TopCountries report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access to countries report - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isForbidden()); - - getClient().perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isUnauthorized()); - - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - 
TOP_COUNTRIES_REPORT_ID, - expectedPoints - ) - ))); + .andExpect(status().isCreated()); + } - getClient().perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - expectedPoints - ) - ))); + @Test + public void TotalDownloadsReport_SupportedDSO_Collection() throws Exception { + getClient(adminToken) + .perform(get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isOk()); } /** @@ -1016,29 +1084,35 @@ public void topCountriesReport_Community_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); - getClient(loggedInToken).perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) - .andExpect(status().isCreated()); + Thread.sleep(1000); UsageReportPointCountryRest expectedPoint = new UsageReportPointCountryRest(); expectedPoint.addValue("views", 2); - expectedPoint.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); + expectedPoint.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TopCountries report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 2) + ) + ) + ))); + })); - // And request that 
collection's TopCountries report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 2) - ) - ) - ))); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); } /** @@ -1050,16 +1124,17 @@ public void topCountriesReport_Item_NotVisited() throws Exception { // Item is not visited // And request that item's TopCountries report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of() - ) - ))); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + + "_" + TOP_COUNTRIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of() + ) + ))); } /** @@ -1073,75 +1148,78 @@ public void topCitiesReport_Item_Visited() throws Exception { viewEventRest.setTargetType("item"); viewEventRest.setTargetId(itemVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = List.of( + getExpectedCityViews("New York", 1) + ); + + // And request that item's TopCities report + getClient(adminToken).perform( + 
get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + expectedPoints + ) + ))); + + // only admin has access to cities report + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isForbidden()); + + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isUnauthorized()); + + // make statistics visible to all + configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + expectedPoints + ) + ))); + + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + expectedPoints + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); + } - List expectedPoints = List.of( - getExpectedCityViews("New York", 1) - ); - - // And request that item's TopCities report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - // ** THEN ** - 
.andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access to cities report - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isForbidden()); - - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isUnauthorized()); - - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - expectedPoints - ) - ))); - - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - expectedPoints - ) - ))); - } - - /** - * Note: Geolite response mocked in {@link org.dspace.statistics.MockSolrLoggerServiceImpl} - */ - @Test - public void topCitiesReport_Community_Visited() throws Exception { - // ** WHEN ** - // We visit a Community thrice - ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("community"); - viewEventRest.setTargetId(communityVisited.getID()); + /** + * Note: Geolite response mocked in {@link org.dspace.statistics.MockSolrLoggerServiceImpl} + */ + @Test + public void topCitiesReport_Community_Visited() throws Exception { + // ** WHEN ** + // We visit a Community thrice + 
ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("community"); + viewEventRest.setTargetId(communityVisited.getID()); ObjectMapper mapper = new ObjectMapper(); @@ -1155,25 +1233,30 @@ public void topCitiesReport_Community_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that community's TopCities report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 3) + ) + ) + ))); + })); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that community's TopCities report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 3) - ) - ) - ))); } /** @@ -1185,22 +1268,22 @@ public void topCitiesReport_Collection_NotVisited() throws Exception { // Collection is not visited // And request that collection's TopCountries report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - 
collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of() - ) - ))); + get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of() + ) + ))); } @Test public void usagereportsSearch_notProperURI_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/search/object?uri=BadUri")) - .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); + .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); } @Test @@ -1212,9 +1295,10 @@ public void usagereportsSearch_noURI_Exception() throws Exception { @Test public void usagereportsSearch_NonExistentUUID_Exception() throws Exception { getClient(adminToken).perform( - get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api" + + "/core" + "/items/" + UUID.randomUUID())) - .andExpect(status().is(HttpStatus.NOT_FOUND.value())); + .andExpect(status().is(HttpStatus.NOT_FOUND.value())); } @Test @@ -1242,7 +1326,7 @@ public void usagereportSearch_onlyAdminReadRights_unvalidToken() throws Exceptio // We request a dso's TotalVisits usage stat report with unvalid token getClient("unvalidToken") .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemNotVisitedWithBitstreams.getID())) + "/items/" + itemNotVisitedWithBitstreams.getID())) // ** THEN ** .andExpect(status().isOk()) .andExpect(jsonPath("$", hasNoJsonPath("$.points"))); @@ -1274,7 +1358,7 @@ public void usagereportSearch_loggedInUserReadRights() throws Exception { // We request a dso's TotalVisits usage stat report as logged in eperson and has read policy for this 
user getClient(loggedInToken) .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemNotVisitedWithBitstreams.getID())) + "/items/" + itemNotVisitedWithBitstreams.getID())) // ** THEN ** .andExpect(status().isOk()) .andExpect(jsonPath("$", hasNoJsonPath("$.points"))); @@ -1293,21 +1377,20 @@ public void usageReportsSearch_Site_mainReports() throws Exception { context.turnOffAuthorisationSystem(); Site site = SiteBuilder.createSite(context).build(); Item item = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Publication") .withTitle("My item") - .withType("Controlled Vocabulary for Resource Type Genres::image") .build(); Item item2 = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Patent") .withTitle("My item 2") - .withType("Controlled Vocabulary for Resource Type Genres::thesis") .build(); Item item3 = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Funding") .withTitle("My item 3") - .withType("Controlled Vocabulary for Resource Type Genres::thesis::bachelor thesis") .build(); Item item4 = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Project") .withTitle("My item 4") - .withType("Controlled Vocabulary for Resource Type Genres::text::periodical::" - + "journal::contribution to journal::journal article") .build(); context.restoreAuthSystemState(); @@ -1341,18 +1424,14 @@ public void usageReportsSearch_Site_mainReports() throws Exception { viewEventRest3.setTargetId(item3.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest3)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest3)) + .contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest4 = new ViewEventRest(); viewEventRest4.setTargetType("item"); viewEventRest4.setTargetId(item4.getID()); - 
getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest4)) - .contentType(contentType)) - .andExpect(status().isCreated()); UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); expectedPoint1.addValue("views", 1); @@ -1378,7 +1457,8 @@ public void usageReportsSearch_Site_mainReports() throws Exception { expectedPoint4.setLabel("My item 4"); expectedPoint4.setId(item4.getID().toString()); - List points = List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); + List points = + List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); pointCity.addValue("views", 5); @@ -1390,52 +1470,86 @@ public void usageReportsSearch_Site_mainReports() throws Exception { UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); pointCountry.addValue("views", 5); - pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); - - UsageReportPointCategoryRest articleCategory = new UsageReportPointCategoryRest(); - articleCategory.addValue("views", 1); - articleCategory.setId("article"); - - UsageReportPointCategoryRest thesisCategory = new UsageReportPointCategoryRest(); - thesisCategory.addValue("views", 3); - thesisCategory.setId("thesis"); - - UsageReportPointCategoryRest otherCategory = new UsageReportPointCategoryRest(); - otherCategory.addValue("views", 1); - otherCategory.setId("other"); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest(); + publicationCategory.addValue("views", 1); + publicationCategory.setId("publication"); + + UsageReportPointCategoryRest patentCategory = new UsageReportPointCategoryRest(); + patentCategory.addValue("views", 2); + 
patentCategory.setId("patent"); + + UsageReportPointCategoryRest fundingCategory = new UsageReportPointCategoryRest(); + fundingCategory.addValue("views", 1); + fundingCategory.setId("funding"); + + UsageReportPointCategoryRest projectCategory = new UsageReportPointCategoryRest(); + projectCategory.addValue("views", 1); + projectCategory.setId("project"); + + UsageReportPointCategoryRest productCategory = new UsageReportPointCategoryRest(); + productCategory.addValue("views", 0); + productCategory.setId("product"); + + UsageReportPointCategoryRest journalCategory = new UsageReportPointCategoryRest(); + journalCategory.addValue("views", 0); + journalCategory.setId("journal"); + + UsageReportPointCategoryRest personCategory = new UsageReportPointCategoryRest(); + personCategory.addValue("views", 0); + personCategory.setId("person"); + + UsageReportPointCategoryRest orgUnitCategory = new UsageReportPointCategoryRest(); + orgUnitCategory.addValue("views", 0); + orgUnitCategory.setId("orgunit"); + + UsageReportPointCategoryRest equipmentCategory = new UsageReportPointCategoryRest(); + equipmentCategory.addValue("views", 0); + equipmentCategory.setId("equipment"); + + UsageReportPointCategoryRest eventCategory = new UsageReportPointCategoryRest(); + eventCategory.addValue("views", 0); + eventCategory.setId("event"); + + List categories = List.of(publicationCategory, patentCategory, fundingCategory, + projectCategory, productCategory, journalCategory, + personCategory, orgUnitCategory, + equipmentCategory, eventCategory); + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request the sites global usage report (show top most popular items) + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "site-mainReports") + .param("uri", "http://localhost:8080/server/api/core/sites/" + site.getID())) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOP_ITEMS_REPORT_ID + , points), + matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, + List.of(pointCity)), + matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), + matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, + List.of(pointContinent)), + matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID, + TOP_CATEGORIES_REPORT_ID, + categories), + matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of(pointCountry))))); + })); - UsageReportPointCategoryRest bookCategory = new UsageReportPointCategoryRest(); - bookCategory.addValue("views", 0); - bookCategory.setId("book"); - - UsageReportPointCategoryRest bookChapterCategory = new UsageReportPointCategoryRest(); - bookChapterCategory.addValue("views", 0); - bookChapterCategory.setId("bookChapter"); - - UsageReportPointCategoryRest datasetCategory = new UsageReportPointCategoryRest(); - datasetCategory.addValue("views", 0); - datasetCategory.setId("dataset"); - - List categories = List.of(articleCategory, thesisCategory, otherCategory, bookCategory, - bookChapterCategory, datasetCategory); - - // And request the sites global usage report (show top most popular items) - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object") - .param("category", "site-mainReports") - .param("uri", "http://localhost:8080/server/api/core/sites/" + site.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, 
TOP_ITEMS_REPORT_ID, points), - matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, List.of(pointCity)), - matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), - matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID, TOP_CONTINENTS_REPORT_ID, - List.of(pointContinent)), - matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID, TOP_CATEGORIES_REPORT_ID, categories), - matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, - List.of(pointCountry))))); + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test @@ -1462,32 +1576,7 @@ public void usageReportsSearch_Site_downloadReports() throws Exception { Bitstream bitstream3 = createBitstream(item2, "Bitstream 3"); Bitstream bitstream4 = createBitstream(item3, "Bitstream 4"); - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) - .andExpect(status().isOk()); - 
context.restoreAuthSystemState(); - UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); expectedPoint1.addValue("views", 3); expectedPoint1.setType("item"); @@ -1518,7 +1607,34 @@ public void usageReportsSearch_Site_downloadReports() throws Exception { UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); pointCountry.addValue("views", 8); - pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + Thread.sleep(1000); getClient(adminToken) .perform(get("/api/statistics/usagereports/search/object") @@ -1536,6 +1652,7 @@ TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(8)), TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), matchUsageReport(site.getID() + "_" + TOP_DOWNLOAD_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + } private Bitstream createBitstream(Item item, 
String name) throws Exception { @@ -1552,62 +1669,68 @@ public void usageReportsSearch_Community_Visited() throws Exception { viewEventRest.setTargetType("community"); viewEventRest.setTargetId(communityVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("community"); + expectedPointTotalVisits.setId(communityVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + // And request the community usage reports + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object?category=community-mainReports" + + "&uri=http://localhost:8080/server/api/core" + + "/communities/" + communityVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(communityVisited, 1) + ) + ), + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + 
UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ) + ))); + })); + + ObjectMapper mapper = new ObjectMapper(); getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("community"); - expectedPointTotalVisits.setId(communityVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); - expectedPointCity.setId("New York"); - - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); - - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/communities/" + communityVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(communityVisited, 1) - ) - ), - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) - ), - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - 
TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ) - ))); } @Test @@ -1616,8 +1739,9 @@ public void usageReportsSearch_Collection_NotVisited() throws Exception { // Collection is not visited // And request the collection's usage reports getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/collections/" + collectionNotVisited.getID())) + .perform(get("/api/statistics/usagereports/search/object?category=collection-mainReports" + + "&uri=http://localhost:8080/server/api/core" + + "/collections/" + collectionNotVisited.getID())) // ** THEN ** .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) @@ -1649,6 +1773,82 @@ public void usageReportsSearch_Collection_NotVisited() throws Exception { @Test public void usageReportsSearch_Item_Visited_FileNotVisited() throws Exception { + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("item"); + expectedPointTotalVisits.setId(itemVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + //views and downloads + List totalDownloadsPoints = new ArrayList<>(); + 
UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit1.addValue("views", 1); + expectedPointTotalVisitsBit1.setType("item"); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit2.addValue("views", 0); + expectedPointTotalVisitsBit2.setType("bitstream"); + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + + + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + itemVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(itemVisited, 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of() + ) + ))); + })); + // ** WHEN ** // We visit an item ViewEventRest viewEventRest = new ViewEventRest(); @@ -1660,74 +1860,7 @@ public void usageReportsSearch_Item_Visited_FileNotVisited() 
throws Exception { getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("item"); - expectedPointTotalVisits.setId(itemVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); - expectedPointCity.setId("New York"); - - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); - - //views and downloads - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit1.addValue("views", 1); - expectedPointTotalVisitsBit1.setType("item"); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit2.addValue("views", 0); - expectedPointTotalVisitsBit2.setType("bitstream"); - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - - - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - 
TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(itemVisited, 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of() - ) - ))); + .andExpect(status().isCreated()); } @Test @@ -1735,14 +1868,14 @@ public void usageReportsSearch_ItemVisited_FilesVisited() throws Exception { context.turnOffAuthorisationSystem(); Bitstream bitstream1 = BitstreamBuilder.createBitstream(context, itemVisited, - toInputStream("test", UTF_8)) - .withName("bitstream1") - .build(); + toInputStream("test", UTF_8)) + .withName("bitstream1") + .build(); Bitstream bitstream2 = BitstreamBuilder.createBitstream(context, itemVisited, - toInputStream("test", UTF_8)) - .withName("bitstream2") - .build(); + toInputStream("test", UTF_8)) + .withName("bitstream2") + .build(); context.restoreAuthSystemState(); // ** WHEN ** @@ -1774,98 +1907,112 @@ public void usageReportsSearch_ItemVisited_FilesVisited() throws Exception { .content(mapper.writeValueAsBytes(viewEventRestBit2)) .contentType(contentType)) .andExpect(status().isCreated()); + + Thread.sleep(3000); + + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("item"); + 
expectedPointTotalVisits.setId(itemVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + List totalDownloadsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit1.addValue("views", 1); + expectedPointTotalVisitsBit1.setLabel("bitstream1"); + expectedPointTotalVisitsBit1.setId(bitstream1.getID().toString()); + expectedPointTotalVisitsBit1.setType("bitstream"); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit2.addValue("views", 2); + expectedPointTotalVisitsBit2.setLabel("bitstream2"); + expectedPointTotalVisitsBit2.setId(bitstream2.getID().toString()); + expectedPointTotalVisitsBit2.setType("bitstream"); + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + + + // first point for views + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsItem = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsItem.addValue("views", 1); + expectedPointTotalVisitsItem.setType("item"); + + //second point for total downloads + UsageReportPointDsoTotalVisitsRest expectedPointTotalDownloads = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalDownloads.addValue("views", 3); + expectedPointTotalDownloads.setType("bitstream"); + + List usageReportPointRestsVisitsAndDownloads = new ArrayList<>(); + usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalVisitsItem); 
usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalDownloads); + + + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + itemVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(itemVisited, 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of( + getExpectedDsoViews(bitstream1, 1), + getExpectedDsoViews(bitstream2, 2) + ) + ) + ))); + })); + getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRestBit2)) .contentType(contentType)) - .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("item"); - expectedPointTotalVisits.setId(itemVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); 
- expectedPointCity.setId("New York"); - - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); - - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit1.addValue("views", 1); - expectedPointTotalVisitsBit1.setLabel("bitstream1"); - expectedPointTotalVisitsBit1.setId(bitstream1.getID().toString()); - expectedPointTotalVisitsBit1.setType("bitstream"); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit2.addValue("views", 2); - expectedPointTotalVisitsBit2.setLabel("bitstream2"); - expectedPointTotalVisitsBit2.setId(bitstream2.getID().toString()); - expectedPointTotalVisitsBit2.setType("bitstream"); - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - - - // first point for views - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsItem = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsItem.addValue("views", 1); - expectedPointTotalVisitsItem.setType("item"); - - //second point for total downlods - UsageReportPointDsoTotalVisitsRest expectedPointTotalDownloads = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalDownloads.addValue("views", 3); - expectedPointTotalDownloads.setType("bitstream"); - - List usageReportPointRestsVisitsAndDownloads = new ArrayList<>(); - usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalVisitsItem); - usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalDownloads); - - - // And request the community usage reports - getClient(adminToken) - 
.perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(itemVisited, 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of( - getExpectedDsoViews(bitstream1, 1), - getExpectedDsoViews(bitstream2, 2) - ) - ) - ))); + .andExpect(status().isCreated()); } @Test @@ -1876,55 +2023,61 @@ public void usageReportsSearch_Bitstream_Visited() throws Exception { viewEventRest.setTargetType("bitstream"); viewEventRest.setTargetId(bitstreamVisited.getID()); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedTotalVisits = List.of( + getExpectedDsoViews(bitstreamVisited, 1) + ); + + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + bitstreamVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", 
not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + expectedTotalVisits + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedTotalVisits + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - .andExpect(status().isCreated()); - - List expectedTotalVisits = List.of( - getExpectedDsoViews(bitstreamVisited, 1) - ); - - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + bitstreamVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - expectedTotalVisits - ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) 
- ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedTotalVisits - ) - ))); + .andExpect(status().isCreated()); } // This test search for statistics before the moment in which item is visited @@ -1934,6 +2087,95 @@ public void usageReportsSearch_ItemNotVisited_AtTime() throws Exception { Site site = SiteBuilder.createSite(context).build(); //create new item using ItemBuilder context.restoreAuthSystemState(); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + //create expected report points + List points = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint1.addValue("views", 0); + expectedPoint1.setType("item"); + points.add(expectedPoint1); + + + UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest(); + publicationCategory.addValue("views", 0); + publicationCategory.setId("publication"); + + UsageReportPointCategoryRest patentCategory = new UsageReportPointCategoryRest(); + patentCategory.addValue("views", 0); + patentCategory.setId("patent"); + + UsageReportPointCategoryRest fundingCategory = new UsageReportPointCategoryRest(); + fundingCategory.addValue("views", 0); + fundingCategory.setId("funding"); + + UsageReportPointCategoryRest projectCategory = new UsageReportPointCategoryRest(); + projectCategory.addValue("views", 0); + projectCategory.setId("project"); + + UsageReportPointCategoryRest productCategory = new UsageReportPointCategoryRest(); + 
productCategory.addValue("views", 0); + productCategory.setId("product"); + + UsageReportPointCategoryRest journalCategory = new UsageReportPointCategoryRest(); + journalCategory.addValue("views", 0); + journalCategory.setId("journal"); + + UsageReportPointCategoryRest personCategory = new UsageReportPointCategoryRest(); + personCategory.addValue("views", 0); + personCategory.setId("person"); + + UsageReportPointCategoryRest orgUnitCategory = new UsageReportPointCategoryRest(); + orgUnitCategory.addValue("views", 0); + orgUnitCategory.setId("orgunit"); + + UsageReportPointCategoryRest equipmentCategory = new UsageReportPointCategoryRest(); + equipmentCategory.addValue("views", 0); + equipmentCategory.setId("equipment"); + + UsageReportPointCategoryRest eventCategory = new UsageReportPointCategoryRest(); + eventCategory.addValue("views", 0); + eventCategory.setId("event"); + + List categories = List.of(publicationCategory, patentCategory, + fundingCategory, + projectCategory, productCategory, journalCategory, + personCategory, orgUnitCategory, + equipmentCategory, eventCategory); + + UsageReportPointRest pointPerMonth = new UsageReportPointDateRest(); + pointPerMonth.setId("June 2019"); + pointPerMonth.addValue("views", 0); + + List pointsPerMonth = List.of(pointPerMonth); + + // And request the sites global usage report (show top most popular items) for a specific date range + // we expect no points because we are searching in a moment before the view of item happened + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/sites/" + site.getID() + + "&startDate=2019-06-01&endDate=2019-06-02&category=site-mainReports")) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, 
TOP_ITEMS_REPORT_ID + , points), + matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, + List.of()), + matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, pointsPerMonth), + matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, List.of()), + matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID, + TOP_CATEGORIES_REPORT_ID, categories), + matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, List.of())))); + })); + //visit first item now ViewEventRest viewEventRest = new ViewEventRest(); viewEventRest.setTargetType("item"); @@ -1943,108 +2185,45 @@ public void usageReportsSearch_ItemNotVisited_AtTime() throws Exception { getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - .andExpect(status().isCreated()); - //create expected raport points - List points = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint1.addValue("views", 0); - expectedPoint1.setType("item"); - points.add(expectedPoint1); - - UsageReportPointCategoryRest articleCategory = new UsageReportPointCategoryRest(); - articleCategory.addValue("views", 0); - articleCategory.setId("article"); - - UsageReportPointCategoryRest thesisCategory = new UsageReportPointCategoryRest(); - thesisCategory.addValue("views", 0); - thesisCategory.setId("thesis"); - - UsageReportPointCategoryRest otherCategory = new UsageReportPointCategoryRest(); - otherCategory.addValue("views", 0); - otherCategory.setId("other"); - - UsageReportPointCategoryRest bookCategory = new UsageReportPointCategoryRest(); - bookCategory.addValue("views", 0); - bookCategory.setId("book"); - - UsageReportPointCategoryRest bookChapterCategory = new UsageReportPointCategoryRest(); - bookChapterCategory.addValue("views", 0); - 
bookChapterCategory.setId("bookChapter"); - - UsageReportPointCategoryRest datasetCategory = new UsageReportPointCategoryRest(); - datasetCategory.addValue("views", 0); - datasetCategory.setId("dataset"); - - List categories = List.of(articleCategory, thesisCategory, otherCategory, bookCategory, - bookChapterCategory, datasetCategory); - - UsageReportPointRest pointPerMonth = new UsageReportPointDateRest(); - pointPerMonth.setId("June 2019"); - pointPerMonth.addValue("views", 0); - - List pointsPerMonth = List.of(pointPerMonth); - - // And request the sites global usage report (show top most popular items) for a specific date range - // we expect no points becase we are searching in a moment before the view of item happened - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/sites/" + site.getID() + "&startDate=2019-06-01&endDate=2019-06-02&category=site-mainReports")) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOP_ITEMS_REPORT_ID, points), - matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, List.of()), - matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, pointsPerMonth), - matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID, - TOP_CONTINENTS_REPORT_ID,List.of()), - matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID, - TOP_CATEGORIES_REPORT_ID, categories), - matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, List.of())))); + .andExpect(status().isCreated()); } // This test search for statistics one day after the moment in which community is visited @Test public void usageReportsSearch_Community_VisitedAtTime() throws Exception { - 
// ** WHEN ** - // We visit a community - ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("community"); - viewEventRest.setTargetId(communityVisited.getID()); - - ObjectMapper mapper = new ObjectMapper(); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) - .andExpect(status().isCreated()); - getExpectedDsoViews(communityVisited, 1); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = getExpectedDsoViews(communityVisited, 1); - - UsageReportPointCityRest expectedPointCity = getExpectedCityViews("New York", 1); - - UsageReportPointCountryRest expectedPointCountry = getExpectedCountryViews(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale()), 1); - - //add one day to the moment when we visit the community - DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); - Calendar cal = Calendar.getInstance(); - cal.add(Calendar.DATE, 1); - String endDate = dateFormat.format(cal.getTime()); - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/communities/" + communityVisited.getID() + "&startDate=2019-06-01&endDate=" + endDate)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + getExpectedDsoViews(communityVisited, 1); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + getExpectedDsoViews(communityVisited, 1); + + UsageReportPointCityRest expectedPointCity = getExpectedCityViews("New York", 1); + + UsageReportPointCountryRest expectedPointCountry = getExpectedCountryViews(Locale.US.getCountry(), + Locale.US.getDisplayCountry( + context.getCurrentLocale()), 
+ 1); + + //add one day to the moment when we visit the community + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + Calendar cal = Calendar.getInstance(); + cal.add(Calendar.DATE, 1); + String endDate = dateFormat.format(cal.getTime()); + // And request the community usage reports + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object?category=community-mainReports" + + "&uri=http://localhost:8080/server/api/core" + + "/communities/" + communityVisited.getID() + "&startDate=2019-06-01&endDate=" + + endDate)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( UsageReportMatcher .matchUsageReport(communityVisited.getID() + "_" + - TOTAL_VISITS_REPORT_ID, TOTAL_VISITS_REPORT_ID, - Arrays.asList(expectedPointTotalVisits)), + TOTAL_VISITS_REPORT_ID, TOTAL_VISITS_REPORT_ID, + List.of(expectedPointTotalVisits)), UsageReportMatcher.matchUsageReport(communityVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, TOTAL_VISITS_PER_MONTH_REPORT_ID, @@ -2056,58 +2235,67 @@ public void usageReportsSearch_Community_VisitedAtTime() throws Exception { TOP_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, Arrays.asList(expectedPointCountry)) - ))); - } + ))); + })); - // filter bitstream only with start date - @Test - public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { // ** WHEN ** - // We visit a bitstream + // We visit a community ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("bitstream"); - viewEventRest.setTargetId(bitstreamVisited.getID()); + viewEventRest.setTargetType("community"); + viewEventRest.setTargetId(communityVisited.getID()); ObjectMapper mapper = new ObjectMapper(); getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - 
.andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("bitstream"); - expectedPointTotalVisits.setLabel("BitstreamVisitedName"); - expectedPointTotalVisits.setId(bitstreamVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); - expectedPointCity.setId("New York"); + .andExpect(status().isCreated()); + } - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); + // filter bitstream only with start date + @Test + public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { - //downloads and views expected points - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit1.addValue("views", 1); - expectedPointTotalVisitsBit1.setType("bitstream"); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit2.addValue("views", 0); - expectedPointTotalVisitsBit2.setType("bitstream"); - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + bitstreamVisited.getID() + "&startDate=2019-05-01")) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", 
Matchers.containsInAnyOrder( + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("bitstream"); + expectedPointTotalVisits.setLabel("BitstreamVisitedName"); + expectedPointTotalVisits.setId(bitstreamVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + //downloads and views expected points + List totalDownloadsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit1.addValue("views", 1); + expectedPointTotalVisitsBit1.setType("bitstream"); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit2.addValue("views", 0); + expectedPointTotalVisitsBit2.setType("bitstream"); + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + bitstreamVisited.getID() + "&startDate=2019-05-01")) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( 
UsageReportMatcher.matchUsageReport(bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOTAL_VISITS_REPORT_ID, Arrays.asList(expectedPointTotalVisits)), @@ -2126,8 +2314,23 @@ public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { TOTAL_DOWNLOADS_REPORT_ID, TOTAL_DOWNLOADS_REPORT_ID, Arrays.asList(expectedPointTotalVisits)) - ))); + ))); + })); + + // ** WHEN ** + // We visit a bitstream + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("bitstream"); + viewEventRest.setTargetId(bitstreamVisited.getID()); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); } + //test for inverse relation between person and publication @Test public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { @@ -2156,36 +2359,6 @@ public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { viewEventRestSecondPublicationBitstream.setTargetType("bitstream"); viewEventRestSecondPublicationBitstream.setTargetId(bitstreampublication_second.getID()); - //create viewevents for all of items and bistreams - ObjectMapper mapper = new ObjectMapper(); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestItem)) - .contentType(contentType)) - .andExpect(status().isCreated()); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationOfPerson)) - .contentType(contentType)) - .andExpect(status().isCreated()); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) - .contentType(contentType)) - .andExpect(status().isCreated()); - - getClient().perform(post("/api/statistics/viewevents") - 
.content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) - .contentType(contentType)) - .andExpect(status().isCreated()); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationBitstream)) - .contentType(contentType)) - .andExpect(status().isCreated()); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) - .contentType(contentType)) - .andExpect(status().isCreated()); //create expected report points for visits UsageReportPointDsoTotalVisitsRest totalVisitRelation = new UsageReportPointDsoTotalVisitsRest(); @@ -2200,7 +2373,8 @@ public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { expectedPointTotal.setLabel(person.getName()); expectedPointTotal.setId(person.getID().toString()); - UsageReportPointDsoTotalVisitsRest totalVisitRelationProjects = new UsageReportPointDsoTotalVisitsRest(); + UsageReportPointDsoTotalVisitsRest totalVisitRelationProjects = + new UsageReportPointDsoTotalVisitsRest(); totalVisitRelationProjects.addValue("views", 0); totalVisitRelationProjects.setType("item"); totalVisitRelationProjects.setLabel("Views"); @@ -2224,62 +2398,102 @@ public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { expectedPointCountryWithRelation.addValue("views", 3); expectedPointCountryWithRelation.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + person.getID().toString())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + + //create viewevents for all of items and bistreams + ObjectMapper mapper = new ObjectMapper(); + 
getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestItem)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationOfPerson)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) + .contentType(contentType)) + .andExpect(status().isCreated()); + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationBitstream)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + person.getID().toString())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(expectedPointTotal)), + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of(expectedPointTotal)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(totalVisitRelation)), + TOTAL_VISITS_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOTAL_VISITS_REPORT_ID, + List.of(totalVisitRelation)), 
UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1)), + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(3)), + TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(3)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_PROJECTS, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(0)), + TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_PROJECTS, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(0)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, - Arrays.asList(expectedPointCity)), + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, + List.of(expectedPointCity)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_CITIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - TOP_CITIES_REPORT_ID, - Arrays.asList(expectedPointCityWithRelation)), + TOP_CITIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOP_CITIES_REPORT_ID, + List.of(expectedPointCityWithRelation)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_CITIES_REPORT_ID_RELATION_PERSON_PROJECTS, - TOP_CITIES_REPORT_ID, - Collections.emptyList()), + TOP_CITIES_REPORT_ID_RELATION_PERSON_PROJECTS, + TOP_CITIES_REPORT_ID, + Collections.emptyList()), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - Arrays.asList(expectedPointCountry)), + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of(expectedPointCountry)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + 
- TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - TOP_COUNTRIES_REPORT_ID, - Arrays.asList(expectedPointCountryWithRelation)), + TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOP_COUNTRIES_REPORT_ID, + List.of(expectedPointCountryWithRelation)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_PROJECTS, - TOP_COUNTRIES_REPORT_ID, - Collections.emptyList()), + TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_PROJECTS, + TOP_COUNTRIES_REPORT_ID, + Collections.emptyList()), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_REPORT_ID_RELATION_PERSON_PROJECTS, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(totalVisitRelationProjects)) - ))); + TOTAL_VISITS_REPORT_ID_RELATION_PERSON_PROJECTS, + TOTAL_VISITS_REPORT_ID, + List.of(totalVisitRelationProjects)) + ))); + })); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) + .contentType(contentType)) + .andExpect(status().isCreated()); } + //test for inverse relation between orgunit and publication @Test public void usageReportsSearch_OrgUnitWithPublicationVisited() throws Exception { @@ -2344,95 +2558,661 @@ public void usageReportsSearch_OrgUnitWithPublicationVisited() throws Exception .contentType(contentType)) .andExpect(status().isCreated()); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) - .contentType(contentType)) - .andExpect(status().isCreated()); - - //create expected report points for visits - UsageReportPointDsoTotalVisitsRest totalVisitRelation = new UsageReportPointDsoTotalVisitsRest(); - totalVisitRelation.addValue("views", 3); - totalVisitRelation.setType("item"); - totalVisitRelation.setLabel("Views"); - totalVisitRelation.setId(orgUnit.getID().toString()); - - //create expected report points for city visits with relation - UsageReportPointCityRest 
expectedPointCityWithRelation = getExpectedCityViews("New York", 3); - - //create expected report points for country visits with relation - UsageReportPointCountryRest expectedPointCountryWithRelation = getExpectedCountryViews(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale()), 3); - - //top items expected report points - List points = new ArrayList<>(); - //first publication - UsageReportPointDsoTotalVisitsRest expectedPoint1 = getExpectedDsoViews(publicationVisited2, 2); - points.add(expectedPoint1); - //second publication - UsageReportPointDsoTotalVisitsRest expectedPoint2 = getExpectedDsoViews(publicationVisited1, 1); - points.add(expectedPoint2); - - //total downloads expected points - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = getExpectedDsoViews( - bitstreampublication_first, 1); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = getExpectedDsoViews( - bitstreampublication_second, 1); - - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - - //total downloads and views expected points - //views - List totalDownloadsAndViewsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest views = new UsageReportPointDsoTotalVisitsRest(); - views.addValue("views", 3); - views.setType("item"); - views.setLabel("Item visits"); - //downloads - UsageReportPointDsoTotalVisitsRest downloads = new UsageReportPointDsoTotalVisitsRest(); - downloads.addValue("views", 2); - downloads.setType("bitstream"); - downloads.setLabel("File visits"); - totalDownloadsAndViewsPoints.add(views); - totalDownloadsAndViewsPoints.add(downloads); - - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + orgUnit.getID().toString()) + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + 
throwingConsumerWrapper((event) -> { + + //create expected report points for visits + UsageReportPointDsoTotalVisitsRest totalVisitRelation = new UsageReportPointDsoTotalVisitsRest(); + totalVisitRelation.addValue("views", 3); + totalVisitRelation.setType("item"); + totalVisitRelation.setLabel("Views"); + totalVisitRelation.setId(orgUnit.getID().toString()); + + //create expected report points for city visits with relation + UsageReportPointCityRest expectedPointCityWithRelation = getExpectedCityViews("New York", 3); + + //create expected report points for country visits with relation + UsageReportPointCountryRest expectedPointCountryWithRelation = + getExpectedCountryViews(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale()), 3); + + //top items expected report points + List points = new ArrayList<>(); + //first publication + UsageReportPointDsoTotalVisitsRest expectedPoint1 = getExpectedDsoViews(publicationVisited2, 2); + points.add(expectedPoint1); + //second publication + UsageReportPointDsoTotalVisitsRest expectedPoint2 = getExpectedDsoViews(publicationVisited1, 1); + points.add(expectedPoint2); + + //total downloads expected points + List totalDownloadsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = getExpectedDsoViews( + bitstreampublication_first, 1); + + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = getExpectedDsoViews( + bitstreampublication_second, 1); + + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + + //total downloads and views expected points + //views + List totalDownloadsAndViewsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest views = new UsageReportPointDsoTotalVisitsRest(); + views.addValue("views", 3); + views.setType("item"); + views.setLabel("Item visits"); + //downloads + UsageReportPointDsoTotalVisitsRest downloads = new UsageReportPointDsoTotalVisitsRest(); + 
downloads.addValue("views", 2); + downloads.setType("bitstream"); + downloads.setLabel("File visits"); + totalDownloadsAndViewsPoints.add(views); + totalDownloadsAndViewsPoints.add(downloads); + + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + orgUnit.getID().toString()) .param("size", "50")) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.hasItems( + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.hasItems( UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_VISITS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(totalVisitRelation)), + TOTAL_VISITS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_VISITS_REPORT_ID, + List.of(totalVisitRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(3)), + TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(3)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOP_CITIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOP_CITIES_REPORT_ID, - Arrays.asList(expectedPointCityWithRelation)), + TOP_CITIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOP_CITIES_REPORT_ID, + List.of(expectedPointCityWithRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOP_COUNTRIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOP_COUNTRIES_REPORT_ID, - Arrays.asList(expectedPointCountryWithRelation)), + TOP_COUNTRIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + 
TOP_COUNTRIES_REPORT_ID, + List.of(expectedPointCountryWithRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOP_ITEMS_REPORT_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOP_ITEMS_REPORT_ID, points), + TOP_ITEMS_REPORT_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOP_ITEMS_REPORT_ID, points), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_DOWNLOADS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_DOWNLOADS_REPORT_ID, totalDownloadsPoints), + TOTAL_DOWNLOADS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_DOWNLOADS_REPORT_ID, totalDownloadsPoints), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_VISITS_TOTAL_DOWNLOADS, - totalDownloadsAndViewsPoints) - ))); + TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_VISITS_TOTAL_DOWNLOADS, + totalDownloadsAndViewsPoints) + ))); + })); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) + .contentType(contentType)) + .andExpect(status().isCreated()); + } + + @Test + public void usageReportsSearch_Collection_ItemReports() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + collectionNotVisited = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); + + Item item = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("My item") + .withType("Controlled Vocabulary for Resource Type Genres::image") + .build(); + Item item2 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("My item 2") + .withType("Controlled Vocabulary for Resource Type Genres::thesis") + .build(); + Item item3 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("My item 3") + .withType("Controlled Vocabulary for Resource Type 
Genres::thesis::bachelor thesis") + .build(); + Item item4 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("My item 4") + .withType("Controlled Vocabulary for Resource Type Genres::text::periodical::" + + "journal::contribution to journal::journal article") + .build(); + context.restoreAuthSystemState(); + + ObjectMapper mapper = new ObjectMapper(); + + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(item.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + ViewEventRest viewEventRest2 = new ViewEventRest(); + viewEventRest2.setTargetType("item"); + viewEventRest2.setTargetId(item2.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + ViewEventRest viewEventRest3 = new ViewEventRest(); + viewEventRest3.setTargetType("item"); + viewEventRest3.setTargetId(item3.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest3)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + Thread.sleep(1000); + + ViewEventRest viewEventRest4 = new ViewEventRest(); + viewEventRest4.setTargetType("item"); + viewEventRest4.setTargetId(item4.getID()); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + + UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint1.addValue("views", 1); + expectedPoint1.setType("item"); + expectedPoint1.setLabel("My item"); + 
expectedPoint1.setId(item.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint2.addValue("views", 2); + expectedPoint2.setType("item"); + expectedPoint2.setLabel("My item 2"); + expectedPoint2.setId(item2.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint3.addValue("views", 1); + expectedPoint3.setType("item"); + expectedPoint3.setLabel("My item 3"); + expectedPoint3.setId(item3.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint4 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint4.addValue("views", 1); + expectedPoint4.setType("item"); + expectedPoint4.setLabel("My item 4"); + expectedPoint4.setId(item4.getID().toString()); + + List points = + List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); + + UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); + pointCity.addValue("views", 5); + pointCity.setId("New York"); + + UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); + pointContinent.addValue("views", 5); + pointContinent.setId("North America"); + + UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); + pointCountry.addValue("views", 5); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + UsageReportPointCategoryRest articleCategory = new UsageReportPointCategoryRest(); + articleCategory.addValue("views", 1); + articleCategory.setId("article"); + + UsageReportPointCategoryRest thesisCategory = new UsageReportPointCategoryRest(); + thesisCategory.addValue("views", 3); + thesisCategory.setId("thesis"); + + UsageReportPointCategoryRest otherCategory = new UsageReportPointCategoryRest(); + otherCategory.addValue("views", 1); + otherCategory.setId("other"); + + UsageReportPointCategoryRest bookCategory = new 
UsageReportPointCategoryRest(); + bookCategory.addValue("views", 0); + bookCategory.setId("book"); + + UsageReportPointCategoryRest bookChapterCategory = new UsageReportPointCategoryRest(); + bookChapterCategory.addValue("views", 0); + bookChapterCategory.setId("bookChapter"); + + UsageReportPointCategoryRest datasetCategory = new UsageReportPointCategoryRest(); + datasetCategory.addValue("views", 0); + datasetCategory.setId("dataset"); + + List categories = + List.of(articleCategory, thesisCategory, otherCategory, bookCategory, + bookChapterCategory, datasetCategory); + + // And request the collections global usage report (show top most popular items) + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "publicationCollection-itemReports") + .param("uri", "http://localhost:8080/server/api/core/collections/" + + collectionNotVisited.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_ITEMS_VISITS_REPORT_ID, + TOP_ITEMS_REPORT_ID, points), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, List.of(pointCity)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CATEGORIES_REPORT_ID, + TOP_CATEGORIES_REPORT_ID, categories), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + })); + + getClient().perform(post("/api/statistics/viewevents") + 
.content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) + .andExpect(status().isCreated()); + } + + @Test + public void usageReportsSearch_Collection_DownloadReports() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item1 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("Item 1") + .build(); + + Item item2 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("Item 2") + .build(); + + Item item3 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("Item 3") + .build(); + + Bitstream bitstream1 = createBitstream(item1, "Bitstream 1"); + Bitstream bitstream2 = createBitstream(item1, "Bitstream 2"); + Bitstream bitstream3 = createBitstream(item2, "Bitstream 3"); + Bitstream bitstream4 = createBitstream(item3, "Bitstream 4"); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + context.restoreAuthSystemState(); + + UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint1.addValue("views", 3); + expectedPoint1.setType("item"); + 
expectedPoint1.setLabel("Item 1"); + expectedPoint1.setId(item1.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint2.addValue("views", 3); + expectedPoint2.setType("item"); + expectedPoint2.setLabel("Item 2"); + expectedPoint2.setId(item2.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint3.addValue("views", 2); + expectedPoint3.setType("item"); + expectedPoint3.setLabel("Item 3"); + expectedPoint3.setId(item3.getID().toString()); + + List points = List.of(expectedPoint1, expectedPoint2, expectedPoint3); + + UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); + pointCity.addValue("views", 8); + pointCity.setId("New York"); + + UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); + pointContinent.addValue("views", 8); + pointContinent.setId("North America"); + + UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); + pointCountry.addValue("views", 8); + pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); + + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "collection-downloadReports") + .param("uri", + "http://localhost:8080/server/api/core/collections/" + collectionNotVisited.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOP_ITEMS_REPORT_ID, points), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_DOWNLOAD_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, List.of(pointCity)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_DOWNLOAD_PER_MONTH_REPORT_ID, + 
TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(8)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_DOWNLOAD_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_DOWNLOAD_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + } + + @Test + public void usageReportsSearch_Community_ItemReports() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + collectionNotVisited = CollectionBuilder.createCollection(context, community).build(); + + Item item = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Publication") + .withTitle("My item") + .build(); + Item item2 = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Patent") + .withTitle("My item 2") + .build(); + Item item3 = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Funding") + .withTitle("My item 3") + .build(); + Item item4 = ItemBuilder.createItem(context, collectionNotVisited) + .withEntityType("Project") + .withTitle("My item 4") + .build(); + context.restoreAuthSystemState(); + + ObjectMapper mapper = new ObjectMapper(); + + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(item.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + ViewEventRest viewEventRest2 = new ViewEventRest(); + viewEventRest2.setTargetType("item"); + viewEventRest2.setTargetId(item2.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + 
.content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + ViewEventRest viewEventRest3 = new ViewEventRest(); + viewEventRest3.setTargetType("item"); + viewEventRest3.setTargetId(item3.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest3)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + + UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint1.addValue("views", 1); + expectedPoint1.setType("item"); + expectedPoint1.setLabel("My item"); + expectedPoint1.setId(item.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint2.addValue("views", 2); + expectedPoint2.setType("item"); + expectedPoint2.setLabel("My item 2"); + expectedPoint2.setId(item2.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint3.addValue("views", 1); + expectedPoint3.setType("item"); + expectedPoint3.setLabel("My item 3"); + expectedPoint3.setId(item3.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint4 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint4.addValue("views", 1); + expectedPoint4.setType("item"); + expectedPoint4.setLabel("My item 4"); + expectedPoint4.setId(item4.getID().toString()); + + List points = + List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); + + UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); + pointCity.addValue("views", 5); + pointCity.setId("New York"); + + UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); + pointContinent.addValue("views", 5); + pointContinent.setId("North America"); + + 
UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); + pointCountry.addValue("views", 5); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest(); + publicationCategory.addValue("views", 1); + publicationCategory.setId("publication"); + + UsageReportPointCategoryRest patentCategory = new UsageReportPointCategoryRest(); + patentCategory.addValue("views", 2); + patentCategory.setId("patent"); + + UsageReportPointCategoryRest fundingCategory = new UsageReportPointCategoryRest(); + fundingCategory.addValue("views", 1); + fundingCategory.setId("funding"); + + UsageReportPointCategoryRest projectCategory = new UsageReportPointCategoryRest(); + projectCategory.addValue("views", 1); + projectCategory.setId("project"); + + UsageReportPointCategoryRest productCategory = new UsageReportPointCategoryRest(); + productCategory.addValue("views", 0); + productCategory.setId("product"); + + UsageReportPointCategoryRest journalCategory = new UsageReportPointCategoryRest(); + journalCategory.addValue("views", 0); + journalCategory.setId("journal"); + + UsageReportPointCategoryRest personCategory = new UsageReportPointCategoryRest(); + personCategory.addValue("views", 0); + personCategory.setId("person"); + + UsageReportPointCategoryRest orgUnitCategory = new UsageReportPointCategoryRest(); + orgUnitCategory.addValue("views", 0); + orgUnitCategory.setId("orgunit"); + + UsageReportPointCategoryRest equipmentCategory = new UsageReportPointCategoryRest(); + equipmentCategory.addValue("views", 0); + equipmentCategory.setId("equipment"); + + UsageReportPointCategoryRest eventCategory = new UsageReportPointCategoryRest(); + eventCategory.addValue("views", 0); + eventCategory.setId("event"); + + List categories = List.of(publicationCategory, patentCategory, + fundingCategory, + projectCategory, productCategory, 
journalCategory, + personCategory, orgUnitCategory, + equipmentCategory, eventCategory); + // And request the collections global usage report (show top most popular items) + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "community-itemReports") + .param("uri", + "http://localhost:8080/server/api/core/communities/" + community.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(community.getID() + "_" + TOTAL_ITEMS_VISITS_REPORT_ID, + TOP_ITEMS_REPORT_ID, points), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, List.of(pointCity)), + matchUsageReport(community.getID() + "_" + TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_CATEGORIES_REPORT_ID, + TOP_CATEGORIES_REPORT_ID, categories), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + })); + + ViewEventRest viewEventRest4 = new ViewEventRest(); + viewEventRest4.setTargetType("item"); + viewEventRest4.setTargetId(item4.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) + .andExpect(status().isCreated()); + } + + @Test + public void usageReportsSearch_Community_DownloadReports() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + collectionNotVisited = CollectionBuilder.createCollection(context, community).build(); + + Item item1 = ItemBuilder.createItem(context, 
collectionNotVisited) + .withTitle("Item 1") + .build(); + + Item item2 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("Item 2") + .build(); + + Item item3 = ItemBuilder.createItem(context, collectionNotVisited) + .withTitle("Item 3") + .build(); + + Bitstream bitstream1 = createBitstream(item1, "Bitstream 1"); + Bitstream bitstream2 = createBitstream(item1, "Bitstream 2"); + Bitstream bitstream3 = createBitstream(item2, "Bitstream 3"); + Bitstream bitstream4 = createBitstream(item3, "Bitstream 4"); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + context.restoreAuthSystemState(); + + UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint1.addValue("views", 3); + expectedPoint1.setType("item"); + expectedPoint1.setLabel("Item 1"); + expectedPoint1.setId(item1.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint2.addValue("views", 3); + expectedPoint2.setType("item"); + expectedPoint2.setLabel("Item 2"); + 
expectedPoint2.setId(item2.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint3.addValue("views", 2); + expectedPoint3.setType("item"); + expectedPoint3.setLabel("Item 3"); + expectedPoint3.setId(item3.getID().toString()); + + List points = List.of(expectedPoint1, expectedPoint2, expectedPoint3); + + UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); + pointCity.addValue("views", 8); + pointCity.setId("New York"); + + UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); + pointContinent.addValue("views", 8); + pointContinent.setId("North America"); + + UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); + pointCountry.addValue("views", 8); + pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); + + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "community-downloadReports") + .param("uri", "http://localhost:8080/server/api/core/communities/" + community.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(community.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, TOP_ITEMS_REPORT_ID, points), + matchUsageReport(community.getID() + "_" + TOP_DOWNLOAD_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, List.of(pointCity)), + matchUsageReport(community.getID() + "_" + TOTAL_DOWNLOAD_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(8)), + matchUsageReport(community.getID() + "_" + TOP_DOWNLOAD_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), + matchUsageReport(community.getID() + "_" + TOP_DOWNLOAD_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + } + + private LocalDate toLocalDate(Date 
date) { + return date.toInstant() + .atZone(ZoneId.systemDefault()) + .toLocalDate(); } private List getLastMonthVisitPoints(int viewsLastMonth) { @@ -2446,10 +3226,45 @@ private List getListOfVisitsPerMonthsPoints(int viewsLastM return getListOfVisitsPerMonthsPoints(viewsLastMonth, nrOfMonthsBack); } - private LocalDate toLocalDate(Date date) { - return date.toInstant() - .atZone(ZoneId.systemDefault()) - .toLocalDate(); + public static final class StatisticsEventListener implements EventListener { + + public Queue> consumers = new LinkedList<>(); + + /* (non-Javadoc) + * @see org.dspace.services.model.EventListener#getEventNamePrefixes() + */ + public String[] getEventNamePrefixes() { + return null; + } + + /* (non-Javadoc) + * @see org.dspace.services.model.EventListener#getResourcePrefix() + */ + public String getResourcePrefix() { + return null; + } + + public void addConsumer(Consumer... consumers) { + this.consumers.addAll(List.of(consumers)); + } + + public Queue> getConsumers() { + return this.consumers; + } + + public void clearConsumers() { + this.consumers.clear(); + } + + /* (non-Javadoc) + * @see org.dspace.services.model.EventListener#receiveEvent(org.dspace.services.model.Event) + */ + public void receiveEvent(Event event) { + Consumer poll = this.consumers.poll(); + if (poll != null) { + poll.accept(event); + } + } } private List getListOfVisitsPerMonthsPoints(int viewsLastMonth, int nrOfMonthsBack) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java index a0343d67e93d..269de3aefb9d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java @@ -9,7 +9,6 @@ import static com.jayway.jsonpath.JsonPath.read; import static 
com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; -import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; @@ -322,10 +321,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=16"), Matchers.containsString("size=1")))) + Matchers.containsString("page=18"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(17))) - .andExpect(jsonPath("$.page.totalPages", is(17))) + .andExpect(jsonPath("$.page.totalElements", is(19))) + .andExpect(jsonPath("$.page.totalPages", is(19))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -348,10 +347,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=16"), Matchers.containsString("size=1")))) + Matchers.containsString("page=18"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(17))) - .andExpect(jsonPath("$.page.totalPages", is(17))) + .andExpect(jsonPath("$.page.totalElements", is(19))) + .andExpect(jsonPath("$.page.totalPages", is(19))) .andExpect(jsonPath("$.page.number", is(1))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index 9236e7c4ce25..d817b573ff2b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -26,6 +26,7 @@ import org.dspace.app.rest.repository.SubmissionFormRestRepository; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.util.DCInputsReaderException; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.builder.EPersonBuilder; import org.dspace.content.authority.DCInputAuthority; import org.dspace.content.authority.service.ChoiceAuthorityService; @@ -72,12 +73,12 @@ public void findAll() throws Exception { .andExpect(content().contentType(contentType)) //The configuration file for the test env includes PAGE_TOTAL_ELEMENTS forms .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(PAGE_TOTAL_ELEMENTS))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) .andExpect(jsonPath("$.page.totalPages", equalTo(2))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect( jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) - //The array of submissionforms should have a size of 20 (default pagination size) + //The array of submissionforms should have a size of 34 .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(20)))) ; } @@ -89,7 +90,7 @@ public void findAllWithNewlyCreatedAccountTest() throws Exception { .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(PAGE_TOTAL_ELEMENTS))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) .andExpect(jsonPath("$.page.totalPages", equalTo(2))) .andExpect(jsonPath("$.page.number", is(0))) 
.andExpect(jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL @@ -638,59 +639,7 @@ public void noExternalSourcesTest() throws Exception { ; } - @Test - public void findPublicationFormTest() throws Exception { - String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/config/submissionforms/publication")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$.id", is("publication"))) - .andExpect(jsonPath("$.name", is("publication"))) - .andExpect(jsonPath("$.type", is("submissionform"))) - .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Title", "You must enter a main title for this item.", - false, "Enter the main title of the item.", null, "dc.title", null)))) - .andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Other Titles", null, true, - "If the item has any alternative titles, please enter them here.", null, - "dc.title.alternative", null)))) - .andExpect(jsonPath("$.rows[3].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("date", "Date of Issue", "You must enter at least the year.", - false, "Please give the date of previous publication or public distribution.\n" - + " You can leave out the day and/or month if they aren't\n" - + " applicable.", null, "dc.date.issued", null)))) - .andExpect(jsonPath("$.rows[4].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("group", "Authors", null, true, - "Enter the names of the authors of this item.", null, - "dc.contributor.author", "AuthorAuthority")))) - .andExpect(jsonPath("$.rows[4].fields[0].rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Author", "You must enter at least the author.", - false, "Enter the names of the authors of this item in the form Lastname," - + " Firstname [i.e. 
Smith, Josh or Smith, J].", null, "dc.contributor.author", - "AuthorAuthority")))) - .andExpect(jsonPath("$.rows[4].fields[0].rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Affiliation", null, false, - "Enter the affiliation of the author as stated on the publication.", - null, "oairecerif.author.affiliation", "OrgUnitAuthority")))) - .andExpect(jsonPath("$.rows[5].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("group", "Editors", null, true, - "The editors of this publication.", null, - "dc.contributor.editor", "EditorAuthority")))) - .andExpect(jsonPath("$.rows[5].fields[0].rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Editor", "You must enter at least the author.", - false, "The editors of this publication.", null, "dc.contributor.editor", - "EditorAuthority")))) - .andExpect(jsonPath("$.rows[5].fields[0].rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Affiliation", null, false, - "Enter the affiliation of the editor as stated on the publication.", - null, "oairecerif.editor.affiliation", "OrgUnitAuthority")))) - .andExpect(jsonPath("$.rows[6].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Type", "You must select a publication type", false, - "Select the type of content of the item.", null, - "dc.type", "types")))); - } - - private void resetLocalesConfiguration() throws DCInputsReaderException { + private void resetLocalesConfiguration() throws DCInputsReaderException, SubmissionConfigReaderException { configurationService.setProperty("default.locale","en"); configurationService.setProperty("webui.supported.locales",null); submissionFormRestRepository.reload(); @@ -721,38 +670,38 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( 
Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=" + (PAGE_TOTAL_PAGES - 1)), Matchers.containsString("size=2")))) + Matchers.containsString("page=19"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(PAGE_TOTAL_ELEMENTS))) - .andExpect(jsonPath("$.page.totalPages", equalTo(PAGE_TOTAL_PAGES))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) + .andExpect(jsonPath("$.page.totalPages", equalTo(20))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") .param("size", "2") - .param("page", "1")) + .param("page", "15")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("publication_references"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("patent_references"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("publication-dc-contributor-author"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("publication-dc-contributor-editor"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=0"), Matchers.containsString("size=2")))) + Matchers.containsString("page=14"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=1"), Matchers.containsString("size=2")))) + Matchers.containsString("page=15"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.next.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), 
- Matchers.containsString("page=2"), Matchers.containsString("size=2")))) + Matchers.containsString("page=16"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=" + (PAGE_TOTAL_PAGES - 1)), Matchers.containsString("size=2")))) + Matchers.containsString("page=19"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(PAGE_TOTAL_ELEMENTS))) - .andExpect(jsonPath("$.page.totalPages", equalTo(PAGE_TOTAL_PAGES))) - .andExpect(jsonPath("$.page.number", is(1))); + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) + .andExpect(jsonPath("$.page.totalPages", equalTo(20))) + .andExpect(jsonPath("$.page.number", is(15))); } @Test @@ -767,7 +716,7 @@ public void visibilityTest() throws Exception { .andExpect(jsonPath("$.rows[0].fields", contains( matchFormWithoutVisibility("Title"), matchFormWithVisibility("Date of Issue", - Map.of("submission", "read-only", "workflow", "hidden", "edit", "hidden")), + Map.of("submission", "read-only", "workflow", "hidden", "edit", "hidden")), matchFormWithVisibility("Type", Map.of("workflow", "hidden", "edit", "hidden")), matchFormWithVisibility("Language", Map.of("submission", "read-only", "workflow", "read-only", "edit", "read-only")), @@ -778,5 +727,27 @@ public void visibilityTest() throws Exception { Map.of("submission", "hidden", "workflow", "read-only", "edit", "read-only")), matchFormWithVisibility("Description", Map.of("submission", "hidden")) ))); + + getClient(tokenAdmin).perform(get("/api/config/submissionforms") + .param("size", "2") + .param("page", "4")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._links.first.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=0"), 
Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.self.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.last.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=19"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) + .andExpect(jsonPath("$.page.totalPages", equalTo(20))) + .andExpect(jsonPath("$.page.number", is(4))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java index cc51a7d413f0..bef44b4b714b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java @@ -40,6 +40,10 @@ */ public class UpdateItemReferenceIT extends AbstractControllerIntegrationTest { + + private static final ConfigurationService configService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final EventService eventService = EventServiceFactory.getInstance().getEventService(); private static String[] consumers; @Autowired @@ -52,13 +56,11 @@ public class UpdateItemReferenceIT extends AbstractControllerIntegrationTest { */ @BeforeClass public static void initCrisConsumer() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); consumers = configService.getArrayProperty("event.dispatcher.default.consumers"); Set consumersSet = new 
HashSet(Arrays.asList(consumers)); consumersSet.remove("referenceresolver"); consumersSet.remove("crisconsumer"); configService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); - EventService eventService = EventServiceFactory.getInstance().getEventService(); eventService.reloadConfiguration(); } @@ -67,9 +69,7 @@ public static void initCrisConsumer() { */ @AfterClass public static void resetDefaultConsumers() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); configService.setProperty("event.dispatcher.default.consumers", consumers); - EventService eventService = EventServiceFactory.getInstance().getEventService(); eventService.reloadConfiguration(); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java index 94e4b2858752..d33d4d0c0bcd 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java @@ -10,6 +10,7 @@ import static org.apache.commons.codec.CharEncoding.UTF_8; import static org.apache.commons.io.IOUtils.toInputStream; import static org.dspace.app.launcher.ScriptLauncher.handleScript; +import static org.dspace.util.FunctionalUtils.throwingConsumerWrapper; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; @@ -25,6 +26,7 @@ import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.metrics.CrisMetrics; import org.dspace.app.metrics.service.CrisMetricsService; +import org.dspace.app.rest.StatisticsRestRepositoryIT.StatisticsEventListener; import org.dspace.app.rest.model.ViewEventRest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import 
org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; @@ -38,6 +40,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.services.ConfigurationService; +import org.dspace.services.EventService; import org.dspace.statistics.factory.StatisticsServiceFactory; import org.dspace.statistics.service.SolrLoggerService; import org.junit.Before; @@ -53,14 +56,19 @@ public class UpdateViewAndDownloadMetricsIT extends AbstractControllerIntegratio private CrisMetricsService crisMetriscService; @Autowired ConfigurationService configurationService; + @Autowired + protected EventService eventService; CrisMetrics crisMetrics = null; + protected final StatisticsEventListener statisticsEventListener = new StatisticsEventListener(); + @Before @Override public void setUp() throws Exception { super.setUp(); // Explicitly use solr commit in SolrLoggerServiceImpl#postView configurationService.setProperty("solr-statistics.autoCommit", false); + this.eventService.registerEventListener(this.statisticsEventListener); } @@ -105,24 +113,26 @@ public void storeCrisMetricsForItemWithViewAndDownloads() throws Exception { .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - //find view and downloads metrics - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + //find 
view and downloads metrics + CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "download", itemVisited.getID()); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "view", itemVisited.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertEquals("download", metrics_downloads.getMetricType()); - assertEquals(2, metrics_downloads.getMetricCount(), 0); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertNull(metrics_downloads.getDeltaPeriod1()); - assertNull(metrics_views.getDeltaPeriod2()); - assertTrue(metrics_views.getLast()); - assertTrue(metrics_downloads.getLast()); + assertEquals("view", metrics_views.getMetricType()); + assertEquals("download", metrics_downloads.getMetricType()); + assertEquals(2, metrics_downloads.getMetricCount(), 0); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertNull(metrics_downloads.getDeltaPeriod1()); + assertNull(metrics_views.getDeltaPeriod2()); + assertTrue(metrics_views.getLast()); + assertTrue(metrics_downloads.getLast()); + })); } //test only with views @@ -148,21 +158,25 @@ public void storeCrisMetricsForItemWithViews() throws Exception { .content(mapper.writeValueAsBytes(viewEventRestItem)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int 
status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "view", itemVisited.getID()); - // find downloads metric - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + // find downloads metric + CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "download", itemVisited.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertTrue(metrics_views.getLast()); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertNull(metrics_views.getDeltaPeriod2()); - // must be null because for the item there are not downloads - assertNull(metrics_downloads); + assertEquals("view", metrics_views.getMetricType()); + assertTrue(metrics_views.getLast()); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertNull(metrics_views.getDeltaPeriod2()); + // must be null because for the item there are not downloads + assertNull(metrics_downloads); + })); } //test with previous metrics @@ -209,26 +223,29 @@ public void storeCrisMetricsForItemWithViewAndDownloadsWithExistingValues() thro .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, 
status); + CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "download", itemVisited.getID()); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "view", itemVisited.getID()); - // find previous metric - CrisMetrics old_metric = crisMetriscService.find(context, crisMetrics.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertEquals("download", metrics_downloads.getMetricType()); - assertEquals(2, metrics_downloads.getMetricCount(), 0); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertNull(metrics_downloads.getDeltaPeriod1()); - assertNull(metrics_views.getDeltaPeriod2()); - assertTrue(metrics_views.getLast()); - assertTrue(metrics_downloads.getLast()); - // previous metric must have last value false - assertFalse(old_metric.getLast()); + // find previous metric + CrisMetrics old_metric = crisMetriscService.find(context, crisMetrics.getID()); + assertEquals("view", metrics_views.getMetricType()); + assertEquals("download", metrics_downloads.getMetricType()); + assertEquals(2, metrics_downloads.getMetricCount(), 0); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertNull(metrics_downloads.getDeltaPeriod1()); + assertNull(metrics_views.getDeltaPeriod2()); + assertTrue(metrics_views.getLast()); + assertTrue(metrics_downloads.getLast()); + // previous metric must have last value false + assertFalse(old_metric.getLast()); + })); } //test with previous week and month views and downloads @@ -298,36 +315,41 @@ context, itemVisited, toInputStream("test", UTF_8)) .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[] {"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, 
ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "download", itemVisited.getID()); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", itemVisited.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, crisMetrics_previous_week_views.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, crisMetrics_previous_month_views.getID()); - CrisMetrics old_metric_downloads_month = crisMetriscService.find(context, - crisMetrics_previous_month_downloads.getID()); - CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, - crisMetrics_previous_week_downloads.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertEquals("download", metrics_downloads.getMetricType()); - assertEquals(2, metrics_downloads.getMetricCount(), 0); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertTrue(metrics_views.getLast()); - assertTrue(metrics_downloads.getLast()); - assertTrue(metrics_views.getDeltaPeriod1() == 0); - assertTrue(metrics_views.getDeltaPeriod2() == 0); - assertTrue(metrics_downloads.getDeltaPeriod1() == 1); - assertTrue(metrics_downloads.getDeltaPeriod2() == 0); - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_downloads_month.getLast()); - assertFalse(old_metric_downloads_week.getLast()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics 
metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "download", itemVisited.getID()); + CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", itemVisited.getID()); + // find previous metrics + CrisMetrics old_metric_views_month = + crisMetriscService.find(context, crisMetrics_previous_week_views.getID()); + CrisMetrics old_metric_views_week = + crisMetriscService.find(context, crisMetrics_previous_month_views.getID()); + CrisMetrics old_metric_downloads_month = crisMetriscService.find(context, + crisMetrics_previous_month_downloads.getID()); + CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, + crisMetrics_previous_week_downloads.getID()); + assertEquals("view", metrics_views.getMetricType()); + assertEquals("download", metrics_downloads.getMetricType()); + assertEquals(2, metrics_downloads.getMetricCount(), 0); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertTrue(metrics_views.getLast()); + assertTrue(metrics_downloads.getLast()); + assertTrue(metrics_views.getDeltaPeriod1() == 0); + assertTrue(metrics_views.getDeltaPeriod2() == 0); + assertTrue(metrics_downloads.getDeltaPeriod1() == 1); + assertTrue(metrics_downloads.getDeltaPeriod2() == 0); + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_downloads_month.getLast()); + assertFalse(old_metric_downloads_week.getLast()); + })); } @@ -407,46 +429,49 @@ context, item, toInputStream("test", UTF_8)) .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - 
CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", item.getID()); - CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "download", item.getID()); - CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", community.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, - crisMetrics_previous_month_views_comm.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, - crisMetrics_previous_week_views_comm.getID()); - CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, - crisMetrics_previous_week_downloads.getID()); - - - //control download values - assertEquals("download", metric_download.getMetricType()); - assertEquals(1, metric_download.getMetricCount(), 0); - assertTrue(metric_download.getLast()); - assertTrue(metric_download.getDeltaPeriod1() == 0.0); - - assertEquals("view", metrics_views_comm.getMetricType()); - assertTrue(metrics_views_comm.getLast()); - assertEquals(2, metrics_views_comm.getMetricCount(), 0); - assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); - - assertEquals("view", metric_view_item.getMetricType()); - assertEquals(1, metric_view_item.getMetricCount(), 0); - assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); - assertTrue(metric_view_item.getLast()); - - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_downloads_week.getLast()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, 
ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", item.getID()); + CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "download", item.getID()); + CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", community.getID()); + // find previous metrics + CrisMetrics old_metric_views_month = crisMetriscService.find(context, + crisMetrics_previous_month_views_comm.getID()); + CrisMetrics old_metric_views_week = crisMetriscService.find(context, + crisMetrics_previous_week_views_comm.getID()); + CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, + crisMetrics_previous_week_downloads.getID()); + + + //control download values + assertEquals("download", metric_download.getMetricType()); + assertEquals(1, metric_download.getMetricCount(), 0); + assertTrue(metric_download.getLast()); + assertTrue(metric_download.getDeltaPeriod1() == 0.0); + + assertEquals("view", metrics_views_comm.getMetricType()); + assertTrue(metrics_views_comm.getLast()); + assertEquals(2, metrics_views_comm.getMetricCount(), 0); + assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); + + assertEquals("view", metric_view_item.getMetricType()); + assertEquals(1, metric_view_item.getMetricCount(), 0); + assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); + assertTrue(metric_view_item.getLast()); + + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_downloads_week.getLast()); + })); } //test with previous week and month views and downloads for community collection and items together @@ -552,64 +577,67 @@ context, item, toInputStream("test", 
UTF_8)) .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", item.getID()); - CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "download", item.getID()); - CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", community.getID()); - CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", col1.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, - crisMetrics_previous_month_views_comm.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, - crisMetrics_previous_week_views_comm.getID()); - CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, - crisMetrics_previous_week_downloads.getID()); - CrisMetrics old_metric_view_week_items = crisMetriscService.find(context, - crisMetrics_previous_week_views_item.getID()); - CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, - crisMetrics_previous_week_views_col.getID()); - CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, - crisMetrics_previous_month_views_col.getID()); - - //control download values - assertEquals("download", metric_download.getMetricType()); - assertEquals(1, metric_download.getMetricCount(), 0); - assertTrue(metric_download.getLast()); - assertTrue(metric_download.getDeltaPeriod1() == 0.0); - - assertEquals("view", metrics_views_comm.getMetricType()); - 
assertTrue(metrics_views_comm.getLast()); - assertEquals(2, metrics_views_comm.getMetricCount(), 0); - assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); - - assertEquals("view", metric_view_item.getMetricType()); - assertEquals(1, metric_view_item.getMetricCount(), 0); - assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); - assertTrue(metric_view_item.getLast()); - - - assertEquals("view", metrics_views_cols.getMetricType()); - assertEquals(2, metrics_views_cols.getMetricCount(), 0); - assertTrue(metrics_views_cols.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); - assertTrue(metrics_views_cols.getLast()); - - - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_downloads_week.getLast()); - assertFalse(old_metric_view_week_items.getLast()); - assertFalse(old_metric_view_week_col.getLast()); - assertFalse(old_metric_view_month_col.getLast()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", item.getID()); + CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "download", item.getID()); + CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", community.getID()); + CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", col1.getID()); + // find previous metrics + CrisMetrics 
old_metric_views_month = crisMetriscService.find(context, + crisMetrics_previous_month_views_comm.getID()); + CrisMetrics old_metric_views_week = crisMetriscService.find(context, + crisMetrics_previous_week_views_comm.getID()); + CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, + crisMetrics_previous_week_downloads.getID()); + CrisMetrics old_metric_view_week_items = crisMetriscService.find(context, + crisMetrics_previous_week_views_item.getID()); + CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, + crisMetrics_previous_week_views_col.getID()); + CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, + crisMetrics_previous_month_views_col.getID()); + + //control download values + assertEquals("download", metric_download.getMetricType()); + assertEquals(1, metric_download.getMetricCount(), 0); + assertTrue(metric_download.getLast()); + assertTrue(metric_download.getDeltaPeriod1() == 0.0); + + assertEquals("view", metrics_views_comm.getMetricType()); + assertTrue(metrics_views_comm.getLast()); + assertEquals(2, metrics_views_comm.getMetricCount(), 0); + assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); + + assertEquals("view", metric_view_item.getMetricType()); + assertEquals(1, metric_view_item.getMetricCount(), 0); + assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); + assertTrue(metric_view_item.getLast()); + + + assertEquals("view", metrics_views_cols.getMetricType()); + assertEquals(2, metrics_views_cols.getMetricCount(), 0); + assertTrue(metrics_views_cols.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); + assertTrue(metrics_views_cols.getLast()); + + + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_downloads_week.getLast()); + 
assertFalse(old_metric_view_week_items.getLast()); + assertFalse(old_metric_view_week_col.getLast()); + assertFalse(old_metric_view_month_col.getLast()); + })); } //test with previous week and month views and downloads for community and collection @@ -682,41 +710,45 @@ public void storeCrisMetricsForCommunityAndCollectionWithViewWithPreviousWeekAnd .content(mapper.writeValueAsBytes(viewEventRestColl)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", community.getID()); - CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", col1.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, - crisMetrics_previous_month_views_comm.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, - crisMetrics_previous_week_views_comm.getID()); - CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, - crisMetrics_previous_week_views_col.getID()); - CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, - crisMetrics_previous_month_views_col.getID()); - - - assertEquals("view", metrics_views_comm.getMetricType()); - assertTrue(metrics_views_comm.getLast()); - assertEquals(2, metrics_views_comm.getMetricCount(), 0); - assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_comm.getDeltaPeriod2() == 0); - - assertEquals("view", metrics_views_cols.getMetricType()); - assertEquals(2, metrics_views_cols.getMetricCount(), 0); - assertTrue(metrics_views_cols.getDeltaPeriod1() == 0); - 
assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); - assertTrue(metrics_views_cols.getLast()); - - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_view_week_col.getLast()); - assertFalse(old_metric_view_month_col.getLast()); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", community.getID()); + CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", col1.getID()); + // find previous metrics + CrisMetrics old_metric_views_month = crisMetriscService.find(context, + crisMetrics_previous_month_views_comm.getID()); + CrisMetrics old_metric_views_week = crisMetriscService.find(context, + crisMetrics_previous_week_views_comm.getID()); + CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, + crisMetrics_previous_week_views_col.getID()); + CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, + crisMetrics_previous_month_views_col.getID()); + + + assertEquals("view", metrics_views_comm.getMetricType()); + assertTrue(metrics_views_comm.getLast()); + assertEquals(2, metrics_views_comm.getMetricCount(), 0); + assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_comm.getDeltaPeriod2() == 0); + + assertEquals("view", metrics_views_cols.getMetricType()); + assertEquals(2, metrics_views_cols.getMetricCount(), 0); + assertTrue(metrics_views_cols.getDeltaPeriod1() == 0); + assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); + 
assertTrue(metrics_views_cols.getLast()); + + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_view_week_col.getLast()); + assertFalse(old_metric_view_month_col.getLast()); + })); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java index 5683bd30a84e..d49a4ce857d4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java @@ -7,6 +7,7 @@ */ package org.dspace.app.rest; +import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -17,6 +18,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocumentList; import org.dspace.app.rest.model.ViewEventRest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.BitstreamBuilder; @@ -29,10 +33,14 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.Site; +import org.dspace.statistics.SolrStatisticsCore; +import org.dspace.utils.DSpace; import org.junit.Test; public class ViewEventRestRepositoryIT extends AbstractControllerIntegrationTest { + private final SolrStatisticsCore solrStatisticsCore = new DSpace().getSingletonService(SolrStatisticsCore.class); + @Test public void 
findAllTestThrowNotImplementedException() throws Exception { @@ -494,5 +502,52 @@ public void postTestAuthenticatedUserSuccess() throws Exception { } + @Test + public void postTestReferrer() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(publicItem1.getID()); + viewEventRest.setReferrer("test-referrer"); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + solrStatisticsCore.getSolr().commit(); + + // Query all statistics and verify it contains a document with the correct referrer + SolrQuery solrQuery = new SolrQuery("*:*"); + QueryResponse queryResponse = solrStatisticsCore.getSolr().query(solrQuery); + SolrDocumentList responseList = queryResponse.getResults(); + assertEquals(1, responseList.size()); + assertEquals("test-referrer", responseList.get(0).get("referrer")); + } + 
} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java index 1ff29a2ba228..1eedda87f990 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java @@ -40,6 +40,7 @@ import org.dspace.content.Item; import org.dspace.content.authority.DCInputAuthority; import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.edit.EditItem; import org.dspace.core.service.PluginService; import org.dspace.services.ConfigurationService; @@ -58,6 +59,9 @@ public class VocabularyRestRepositoryIT extends AbstractControllerIntegrationTes @Autowired private ConfigurationService configurationService; + @Autowired + private MetadataAuthorityService metadataAuthorityService; + @Autowired private SubmissionFormRestRepository submissionFormRestRepository; @@ -149,22 +153,22 @@ public void destroy() throws Exception { public void findAllTest() throws Exception { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(get("/api/submission/vocabularies")) - .andExpect(status().isOk()) + .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.vocabularies", Matchers.containsInAnyOrder( VocabularyMatcher.matchProperties("srsc", "srsc", false, true), + VocabularyMatcher.matchProperties("common_iso_languages", "common_iso_languages", true, false), + VocabularyMatcher.matchProperties("SRPublisher", "SRPublisher", false, false), + VocabularyMatcher.matchProperties("patent_types", "patent_types", true, false), + VocabularyMatcher.matchProperties("types", "types", false, true), + VocabularyMatcher.matchProperties("gender", "gender", true, false), + VocabularyMatcher.matchProperties("SolrAuthorAuthority", 
"SolrAuthorAuthority", false, false), + VocabularyMatcher.matchProperties("SRJournalTitle", "SRJournalTitle", false, false), VocabularyMatcher.matchProperties("common_types", "common_types", true, false), - VocabularyMatcher.matchProperties("common_iso_languages", "common_iso_languages", true , false), - VocabularyMatcher.matchProperties("SolrAuthorAuthority", "SolrAuthorAuthority", false , false), - VocabularyMatcher.matchProperties("patent_types", "patent_types", true , false), - VocabularyMatcher.matchProperties("types", "types", false , true), - VocabularyMatcher.matchProperties("gender", "gender", true , false), - VocabularyMatcher.matchProperties("SRPublisher", "SRPublisher", false , false), - VocabularyMatcher.matchProperties("SRJournalTitle", "SRJournalTitle", false , false), - VocabularyMatcher.matchProperties("publication-coar-types", "publication-coar-types", false , true) - ))) - .andExpect(jsonPath("$._links.self.href", - Matchers.containsString("api/submission/vocabularies"))) - .andExpect(jsonPath("$.page.totalElements", is(10))); + VocabularyMatcher.matchProperties("publication-coar-types", "publication-coar-types", false, true) + ))) + .andExpect(jsonPath("$._links.self.href", + Matchers.containsString("api/submission/vocabularies"))) + .andExpect(jsonPath("$.page.totalElements", is(10))); } @Test @@ -506,140 +510,160 @@ public void linkedEntitiesWithFilterAndEntryIdTest() throws Exception { @Test public void controlledVocabularyWithHierarchyStoreSetTrueTest() throws Exception { context.turnOffAuthorisationSystem(); - String vocabularyName = "publication-coar-types"; - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Root Community") - .build(); - - Collection col = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("Publication") - .withName("Collection 1") - .build(); - - Item itemA = ItemBuilder.createItem(context, col) - .withTitle("Test Item A") - .withIssueDate("2023-04-04") - 
.withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") - .build(); - - EditItem editItem = new EditItem(context, itemA); - - context.restoreAuthSystemState(); - - String tokenAdmin = getAuthToken(admin.getEmail(), password); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("Resource Types::text::book::book part")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_3248")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); - - AtomicReference selectedLeafValue = new AtomicReference<>(); - AtomicReference selectedLeafauthority = new AtomicReference<>(); - - getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") - .param("metadata", "dc.type") - .param("entryID", vocabularyName + ":c_b239")) - .andExpect(status().isOk()) - .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].value"))) - .andDo(result -> selectedLeafauthority.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].authority"))); - - List operations = new ArrayList(); - Map value = new HashMap(); - value.put("value", selectedLeafValue.get()); - value.put("authority", selectedLeafauthority.get()); - value.put("confidence", "600"); - operations.add(new ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); - - String patchBody = getPatchContent(operations); - getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") - .content(patchBody) - .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) - .andExpect(status().isOk()); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", 
Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("text::journal::editorial")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String vocabularyName = "publication-coar-types"; + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Root Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Publication") + .withName("Collection 1") + .build(); + + Item itemA = ItemBuilder.createItem(context, col) + .withTitle("Test Item A") + .withIssueDate("2023-04-04") + .withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") + .build(); + + EditItem editItem = new EditItem(context, itemA); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath( + "$['dc.type'][0].value", is("Resource Types::text::book::book part")), + hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_3248")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + + AtomicReference selectedLeafValue = new AtomicReference<>(); + AtomicReference selectedLeafauthority = new AtomicReference<>(); + + getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") + .param("metadata", "dc.type") + .param("entryID", vocabularyName + ":c_b239")) + .andExpect(status().isOk()) + .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].value"))) + 
.andDo(result -> selectedLeafauthority.set( + read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].authority"))); + + List operations = new ArrayList(); + Map value = new HashMap(); + value.put("value", selectedLeafValue.get()); + value.put("authority", selectedLeafauthority.get()); + value.put("confidence", "600"); + operations.add(new ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); + + String patchBody = getPatchContent(operations); + getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath("$['dc.type'][0].value", is("text::journal::editorial")), + hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void controlledVocabularyWithHierarchyStoreSetFalseTest() throws Exception { context.turnOffAuthorisationSystem(); - String vocabularyName = "publication-coar-types"; - configurationService.setProperty("vocabulary.plugin." 
+ vocabularyName + ".hierarchy.store", false); - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Root Community") - .build(); - - Collection col = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("Publication") - .withName("Collection 1") - .build(); - - Item itemA = ItemBuilder.createItem(context, col) - .withTitle("Test Item A") - .withIssueDate("2023-04-04") - .withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") - .build(); - - EditItem editItem = new EditItem(context, itemA); - - context.restoreAuthSystemState(); - - String tokenAdmin = getAuthToken(admin.getEmail(), password); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("Resource Types::text::book::book part")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_3248")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); - - AtomicReference selectedLeafValue = new AtomicReference<>(); - AtomicReference selectedLeafauthority = new AtomicReference<>(); - - getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") - .param("metadata", "dc.type") - .param("entryID", vocabularyName + ":c_b239")) - .andExpect(status().isOk()) - .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].value"))) - .andDo(result -> selectedLeafauthority.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].authority"))); - - List operations = new ArrayList(); - Map value = new HashMap(); - value.put("value", selectedLeafValue.get()); - value.put("authority", selectedLeafauthority.get()); - value.put("confidence", "600"); - operations.add(new 
ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); - - String patchBody = getPatchContent(operations); - getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") - .content(patchBody) - .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) - .andExpect(status().isOk()); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("editorial")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String vocabularyName = "publication-coar-types"; + configurationService.setProperty("vocabulary.plugin." + vocabularyName + ".hierarchy.store", false); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Root Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Publication") + .withName("Collection 1") + .build(); + + Item itemA = ItemBuilder.createItem(context, col) + .withTitle("Test Item A") + .withIssueDate("2023-04-04") + .withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") + .build(); + + EditItem editItem = new EditItem(context, itemA); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath("$['dc.type'][0].value", + is("Resource Types::text::book::book part")), + hasJsonPath("$['dc.type'][0].authority", 
is(vocabularyName + ":c_3248")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + + AtomicReference selectedLeafValue = new AtomicReference<>(); + AtomicReference selectedLeafauthority = new AtomicReference<>(); + + getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") + .param("metadata", "dc.type") + .param("entryID", vocabularyName + ":c_b239")) + .andExpect(status().isOk()) + .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].value"))) + .andDo(result -> selectedLeafauthority.set( + read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].authority"))); + + List operations = new ArrayList(); + Map value = new HashMap(); + value.put("value", selectedLeafValue.get()); + value.put("authority", selectedLeafauthority.get()); + value.put("confidence", "600"); + operations.add(new ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); + + String patchBody = getPatchContent(operations); + getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath("$['dc.type'][0].value", is("editorial")), + hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index c7725805687d..c6b4821a950f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -60,9 +60,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.jayway.jsonpath.matchers.JsonPathMatchers; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; +import org.apache.commons.text.StringEscapeUtils; import org.dspace.app.rest.matcher.CollectionMatcher; import org.dspace.app.rest.matcher.ItemMatcher; import org.dspace.app.rest.matcher.MetadataMatcher; @@ -112,6 +112,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.supervision.SupervisionOrder; import org.dspace.util.UUIDUtils; +import org.dspace.validation.LicenseValidator; import org.dspace.versioning.ItemCorrectionProvider; import org.hamcrest.Matchers; import org.junit.Before; @@ -130,6 +131,8 @@ */ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegrationTest { + public static final String LICENSE_ERROR_MESSAGE_PATH = + "$.errors[?(@.message=='" + LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + "')]"; @Autowired private CollectionService cs; @Autowired @@ -2027,27 +2030,109 @@ public void createSingleWorkspaceItemsFromSingleFileWithMultipleEntriesTest() th Collection col1 = CollectionBuilder.createCollection(context, child1) .withName("Collection 1") .withSubmitterGroup(eperson) + .withEntityType("Publication") + .withSubmissionDefinition("traditional") .build(); Collection col2 = CollectionBuilder.createCollection(context, child1) .withName("Collection 2") .withSubmitterGroup(eperson) + .withEntityType("Publication") + .withSubmissionDefinition("traditional") .build(); - 
InputStream bibtex = getClass().getResourceAsStream("bibtex-test-3-entries.bib"); - final MockMultipartFile bibtexFile = new MockMultipartFile("file", "bibtex-test-3-entries.bib", - "application/x-bibtex", - bibtex); + try (InputStream bibtex = getClass().getResourceAsStream("bibtex-test-3-entries.bib")) { + final MockMultipartFile bibtexFile = + new MockMultipartFile( + "file", "bibtex-test-3-entries.bib", + "application/x-bibtex", bibtex + ); - context.restoreAuthSystemState(); + context.restoreAuthSystemState(); - String authToken = getAuthToken(eperson.getEmail(), password); - // create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1) - getClient(authToken).perform(multipart("/api/submission/workspaceitems") - .file(bibtexFile)) - // create should return return a 422 because we don't allow/support bibliographic files - // that have multiple metadata records - .andExpect(status().is(422)); - bibtex.close(); + String authToken = getAuthToken(eperson.getEmail(), password); + // create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1) + getClient(authToken) + .perform( + multipart("/api/submission/workspaceitems").file(bibtexFile) + ) + // bulk create should return 200, 201 (created) is better for single resource + .andExpect(status().isOk()) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value", + is("My Article") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0]._embedded.collection.id", + is(col1.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1].sections.traditionalpageone['dc.title'][0].value", + is("My Article 2") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1]._embedded.collection.id", + is(col1.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[2].sections.traditionalpageone['dc.title'][0].value", + 
is("My Article 3") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[2]._embedded.collection.id", + is(col1.getID().toString()) + ) + ) + .andExpect( + jsonPath("$._embedded.workspaceitems[*]._embedded.upload").doesNotExist()); + getClient(authToken) + .perform( + multipart("/api/submission/workspaceitems") + .file(bibtexFile) + .param("owningCollection", col2.getID().toString()) + ) + .andExpect(status().isOk()) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value", + is("My Article") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0]._embedded.collection.id", + is(col2.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1].sections.traditionalpageone['dc.title'][0].value", + is("My Article 2") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1]._embedded.collection.id", + is(col2.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[2].sections.traditionalpageone['dc.title'][0].value", + is("My Article 3") + ) + ); + } } @Test @@ -4515,8 +4600,18 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect( + jsonPath( + + LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath( + "$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + ) + ) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4526,8 +4621,8 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) 
.andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4543,8 +4638,12 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath("$.paths", + contains(hasJsonPath("$", is("/sections/license")))) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4554,8 +4653,8 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem2.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4571,8 +4670,13 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - 
.andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath("$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4582,8 +4686,13 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem3.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath("$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4599,8 +4708,14 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath( + "$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4610,8 +4725,8 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted 
getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem4.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -10040,5 +10155,4 @@ public void patchBySupervisorTest() throws Exception { "ExtraEntry") ))); } - } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java index a93a964d36de..0ac7eea4250d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java @@ -37,6 +37,7 @@ import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.ResultActions; /** * Test for the following authorization features: @@ -52,6 +53,8 @@ */ public class GenericAuthorizationFeatureIT extends AbstractControllerIntegrationTest { + private static final int SIZE = 100; + @Autowired ConfigurationService configurationService; @@ -208,215 +211,163 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { String siteId = ContentServiceFactory.getInstance().getSiteService().findSite(context).getID().toString(); // Verify the general admin has this feature on the site - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/sites/" 
+ siteId)) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/sites/" + siteId) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the site - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/sites/" + siteId)) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/sites/" + siteId) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on community A - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(adminToken,"http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community A - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community AA - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityAA.getID()) 
.andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin doesn’t have this feature on community A - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on community B - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityB.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityB.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on collection X - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on collection X - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on collection X - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on collection X - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on collection Y - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionY.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionY.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" 
+ feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 2 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform( - 
get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bundle in item 2 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + 
"http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bitstream in item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bitstream in item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bitstream in item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bitstream in item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + 
bitstream1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bitstream in item 2 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -429,41 +380,31 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception { String item1AdminToken = getAuthToken(item1Admin.getEmail(), password); // Verify the general admin has this feature on item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + 
"http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on item 2 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -478,73 +419,55 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write has this feature on community A if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(communityAWriterToken).perform( - 
get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify community A write doesn’t have this feature on community AA - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on collection X - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on item 1 - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken).perform( - 
get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on the bitstream in item 1 - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on community A - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on community AA - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -552,65 +475,49 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify 
collection X write has this feature on collection X if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify collection X write doesn’t have this feature on item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); 
// Verify collection X write doesn’t have this feature on the bitstream in item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on community A - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on community AA - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on collection X - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -618,57 
+525,43 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write has this feature on item 1 if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bitstream in item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + 
"')]").doesNotExist()); // Verify community A write doesn’t have this feature on community B - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityB.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityB.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on collection Y - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionY.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/collections/" + collectionY.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on item 2 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -680,17 +573,13 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc String item1WriterToken = getAuthToken(item1Writer.getEmail(), password); // Verify community A write doesn’t have this feature on item 1 - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + 
item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -698,25 +587,19 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc // Verify item 1 write has this feature on item 1 if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify item 1 write doesn’t have this feature on item 2 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + 
item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -754,42 +637,31 @@ public void testCanMoveAdmin() throws Exception { final String feature = "canMove"; // Verify the general admin has this feature on item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID()) - .param("size", "30")) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on item 2 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -806,9 +678,7 @@ public void testCanMoveAdmin() throws Exception { context.restoreAuthSystemState(); // verify item 1 write has this feature on item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]") .exists()); @@ -829,9 +699,7 @@ public void testCanMoveWriter() throws Exception { String item1WriterToken = getAuthToken(item1Writer.getEmail(), password); // verify item 1 write has this feature on item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]") .exists()); @@ -867,29 +735,25 @@ public void testCanDeleteAdmin() throws Exception { final String feature = "canDelete"; // Verify the general admin doesn’t have this feature on the site - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + 
"http://localhost/api/core/sites/" + siteId)) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/sites/" + siteId) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on community A - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community A - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community AA - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -908,162 +772,139 @@ public void testCanDeleteAdmin() throws Exception { .build(); context.restoreAuthSystemState(); String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password); - 
getClient(communityAAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAAAdminToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on community A - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on community B - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityB.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityB.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on collection X - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on collection X - 
getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin doesn’t have this feature on collection X - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 admin doesn’t have this feature on collection X - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on collection Y - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionY.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionY.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on item 1 - 
getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken,"http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken,"http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 admin doesn’t have this feature on item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + 
getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bundle in item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bitstream in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bitstream in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bitstream in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bitstream in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bitstream in item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1090,8 +931,7 @@ public void testCanDeleteAdminParent() throws Exception { context.restoreAuthSystemState(); String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password); //verify the community AA admin has this feature on community AA - getClient(communityAAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAAAdminToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1105,8 +945,7 @@ public void testCanDeleteAdminParent() throws Exception { .build(); context.restoreAuthSystemState(); // verify collection X admin has this feature on collection X - 
getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1120,8 +959,7 @@ public void testCanDeleteAdminParent() throws Exception { .build(); context.restoreAuthSystemState(); // verify item 1 admin has this feature on item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1151,14 +989,12 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityADeleterToken = getAuthToken(communityADeleter.getEmail(), password); // Verify the user has this feature on community A - getClient(communityADeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityADeleterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community AA - getClient(communityADeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityADeleterToken, 
"http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1179,20 +1015,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityARemoverToken = getAuthToken(communityARemover.getEmail(), password); // Verify the user has this feature on community AA - getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityARemoverToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community A - getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityARemoverToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on collection X - getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityARemoverToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1212,20 +1045,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { 
context.restoreAuthSystemState(); String communityAARemoverToken = getAuthToken(communityAARemover.getEmail(), password); // Verify the user has this feature on collection X - getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAARemoverToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community AA - getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAARemoverToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on item 1 - getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAARemoverToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1245,8 +1075,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String collectionXRemoverToken = getAuthToken(collectionXRemover.getEmail(), password); // Verify the user doesn’t have this feature on item 1 - getClient(collectionXRemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + 
item1.getID())) + getAuthorizationFeatures(collectionXRemoverToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1266,8 +1095,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String item1DeleterToken = getAuthToken(item1Deleter.getEmail(), password); // Verify the user doesn’t have this feature on item 1 - getClient(item1DeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1DeleterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1292,23 +1120,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String collectionXRemoverItem1DeleterToken = getAuthToken(collectionXRemoverItem1Deleter.getEmail(), password); // Verify the user has this feature on item 1 - getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXRemoverItem1DeleterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on collection X - getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXRemoverItem1DeleterToken, "http://localhost/api/core/collections/" + 
collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on the bundle in item 1 - getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXRemoverItem1DeleterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1328,20 +1150,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String item1RemoverToken = getAuthToken(item1Remover.getEmail(), password); // Verify the user has this feature on the bundle in item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1RemoverToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1RemoverToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on the bitstream in item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + 
"http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(item1RemoverToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1361,8 +1180,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String bundle1RemoverToken = getAuthToken(bundle1Remover.getEmail(), password); // Verify the user doesn’t have this feature on the bitstream in item 1 - getClient(bundle1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(bundle1RemoverToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1388,8 +1206,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String bundle1item1RemoverToken = getAuthToken(bundle1item1Remover.getEmail(), password); // Verify the user has this feature on the bitstream in item 1 - getClient(bundle1item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(bundle1item1RemoverToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1404,36 +1221,31 @@ public void testCanReorderBitstreamsAdmin() throws Exception { final String feature = "canReorderBitstreams"; // Verify the general admin has this feature on the bundle in item 1 - 
getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the bundle in item 2 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + 
"http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1447,28 +1259,24 @@ public void testCanReorderBitstreamsWriter() throws Exception { final String feature = "canReorderBitstreams"; // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Create a new user, grant WRITE permissions on the bundle in item 1 to this user // 
Verify the user has this feature on the bundle in item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1483,36 +1291,31 @@ public void testCanCreateBitstreamAdmin() throws Exception { final String feature = "canCreateBitstream"; // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + 
"')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the bundle in item 2 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1526,22 +1329,19 @@ public void testCanCreateBitstreamWriter() throws Exception { final String feature = "canCreateBitstream"; // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1561,8 +1361,7 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1WriterToken = getAuthToken(bundle1Writer.getEmail(), password); // Verify the user doesn’t have this feature on the bundle in item 1 - getClient(bundle1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(bundle1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1582,8 +1381,7 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1AdderToken = getAuthToken(bundle1Adder.getEmail(), password); // Verify the user doesn’t have this feature on the bundle in item 1 - getClient(bundle1AdderToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(bundle1AdderToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1619,8 +1417,7 @@ public void 
testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1WriterAdderToken = getAuthToken(bundle1WriterAdder.getEmail(), password); // Verify the user has this feature on the bundle in item 1 - getClient(bundle1WriterAdderToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(bundle1WriterAdderToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1639,22 +1436,19 @@ public void testCanCreateBundleWriter() throws Exception { final String feature = "canCreateBundle"; // Verify community A write doesn’t have this feature on item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on item 1 - getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, 
"http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1679,10 +1473,22 @@ public void testCanCreateBundleWriter() throws Exception { context.restoreAuthSystemState(); String item1AdderWriterToken = getAuthToken(item1AdderWriter.getEmail(), password); // Verify the user has this feature on item 1 - getClient(item1AdderWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdderWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } + + private ResultActions getAuthorizationFeatures(String adminToken, String uri) throws Exception { + return getAuthorizationFeatures(adminToken, uri, SIZE); + } + + private ResultActions getAuthorizationFeatures(String adminToken, String uri, int size) throws Exception { + return getClient(adminToken) + .perform( + get( + "/api/authz/authorizations/search/object?size=" + size + "&embed=feature&uri=" + uri + ) + ); + } } \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java index acb3b0c263d0..73b12848e790 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java @@ -13,6 +13,7 @@ import org.dspace.app.rest.model.RootRest; import org.dspace.app.util.Util; +import org.dspace.core.CrisConstants; import org.dspace.services.ConfigurationService; import org.junit.Before; import org.junit.Test; @@ -52,7 +53,8 @@ public void 
testCorrectPropertiesSetFromConfigurationService() throws Exception assertEquals("dspaceurl", rootRest.getDspaceUI()); assertEquals("dspacename", rootRest.getDspaceName()); assertEquals(restUrl, rootRest.getDspaceServer()); - assertEquals("DSpace " + Util.getSourceVersion(), rootRest.getDspaceVersion()); + assertEquals(CrisConstants.DSPACE_BASE_VERSION, rootRest.getDspaceVersion()); + assertEquals(Util.getSourceVersion(), rootRest.getCrisVersion()); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java index e020c04b1a25..d1679ae1d20b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java @@ -24,9 +24,11 @@ import java.util.concurrent.atomic.AtomicReference; import javax.ws.rs.core.MediaType; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.requestitem.RequestItemAuthor; import org.dspace.app.requestitem.RequestItemAuthorExtractor; +import org.dspace.app.requestitem.RequestItemHelpdeskStrategy; import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; @@ -46,6 +48,7 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.versioning.Version; import org.dspace.versioning.factory.VersionServiceFactory; @@ -76,7 +79,7 @@ public class DeleteEPersonSubmitterIT extends AbstractControllerIntegrationTest protected RequestItemAuthorExtractor requestItemAuthorExtractor = 
DSpaceServicesFactory.getInstance() .getServiceManager() - .getServiceByName("org.dspace.app.requestitem.RequestItemAuthorExtractor", + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), RequestItemAuthorExtractor.class); @@ -85,15 +88,8 @@ public class DeleteEPersonSubmitterIT extends AbstractControllerIntegrationTest private EPerson submitterForVersion2; private EPerson workflowUser; - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DeleteEPersonSubmitterIT.class); + private static final Logger log = LogManager.getLogger(); - /** - * This method will be run before every test as per @Before. It will - * initialize resources required for the tests. - * - * Other methods can be annotated with @Before here or in subclasses but no - * execution order is guaranteed - */ @Before @Override public void setUp() throws Exception { @@ -114,8 +110,8 @@ public void setUp() throws Exception { /** - * This test verifies that when the submitter Eperson is deleted, the delete succeeds and the item will have - * 'null' as submitter + * This test verifies that when the submitter Eperson is deleted, the delete + * succeeds and the item will have 'null' as submitter. * * @throws Exception */ @@ -140,12 +136,21 @@ public void testArchivedItemSubmitterDelete() throws Exception { assertNull(retrieveItemSubmitter(installItem.getID())); + // Don't depend on external configuration; set up helpdesk as needed. + final String HELPDESK_EMAIL = "dspace-help@example.com"; + final String HELPDESK_NAME = "Help Desk"; + ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + configurationService.setProperty("mail.helpdesk", HELPDESK_EMAIL); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + // Test it. 
Item item = itemService.find(context, installItem.getID()); List requestItemAuthor = requestItemAuthorExtractor.getRequestItemAuthor(context, item); - assertEquals("Help Desk", requestItemAuthor.get(0).getFullName()); - assertEquals("dspace-help@myu.edu", requestItemAuthor.get(0).getEmail()); + assertEquals(HELPDESK_NAME, requestItemAuthor.get(0).getFullName()); + assertEquals(HELPDESK_EMAIL, requestItemAuthor.get(0).getEmail()); } /** diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java index 9e345a4b607e..accc99f44de3 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java @@ -7,7 +7,9 @@ */ package org.dspace.app.rest.iiif; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; @@ -129,7 +131,7 @@ public void findOneIIIFSearchableItemWithDefaultDimensionsIT() throws Exception .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.service.profile", is("http://iiif.io/api/search/0/search"))) - .andExpect(jsonPath("$.thumbnail.@id", Matchers.containsString("/iiif/2/" + .andExpect(jsonPath("$.thumbnail.@id", containsString("/iiif/2/" + bitstream1.getID()))) .andExpect(jsonPath("$.metadata[0].label", is("Title"))) .andExpect(jsonPath("$.metadata[0].value", is("Public item 1"))) @@ -139,7 +141,7 @@ public void findOneIIIFSearchableItemWithDefaultDimensionsIT() throws Exception 
.andExpect(jsonPath("$.metadata[2].value[0]", is("Smith, Donald"))) .andExpect(jsonPath("$.metadata[2].value[1]", is("Doe, John"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas"))) + containsString("/iiif/" + publicItem1.getID() + "/canvas"))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(64))) .andExpect(jsonPath("$.sequences[0].canvases[0].height", is(64))) @@ -157,14 +159,14 @@ public void findOneIIIFSearchableItemWithDefaultDimensionsIT() throws Exception .andExpect(jsonPath("$.sequences[0].canvases[0].metadata[4].value", is("11e23c5702595ba512c1c2ee8e8d6153 (MD5)"))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].label", is("2"))) .andExpect(jsonPath("$.sequences[0].canvases[1].images[0].resource.service.@id", Matchers.endsWith(bitstream2.getID().toString()))) .andExpect(jsonPath("$.structures").doesNotExist()) .andExpect(jsonPath("$.related.@id", - Matchers.containsString("/items/" + publicItem1.getID()))); + containsString("/items/" + publicItem1.getID()))); } @Test @@ -212,13 +214,13 @@ public void findOneIIIFSearchableWithMixedConfigIT() throws Exception { .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(3163))) .andExpect(jsonPath("$.sequences[0].canvases[0].height", 
is(4220))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].label", is("Global 2"))) .andExpect(jsonPath("$.sequences[0].canvases[1].width", is(2000))) @@ -227,6 +229,117 @@ public void findOneIIIFSearchableWithMixedConfigIT() throws Exception { .andExpect(jsonPath("$.service").exists()); } + @Test + public void findOneWithExcludedBitstreamIT() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .enableIIIF() + .build(); + + String bitstreamContent = "ThisIsSomeText"; + Bitstream bitstream1; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream1.jpg") + .withMimeType("image/jpeg") + .withIIIFLabel("Custom Label") + .build(); + } + Bitstream bitstream2; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFDisabled() + .build(); + } + context.restoreAuthSystemState(); + // Expect canvas label, width and height to match bitstream description. 
+ getClient().perform(get("/iiif/" + publicItem1.getID() + "/manifest")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) + .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) + .andExpect(jsonPath("$.sequences[0].canvases[0].@id", + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream1.getID().toString() + ) + )) + .andExpect(jsonPath("$.sequences[0].canvases[*].@id", + not( + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream2.getID().toString() + ) + ) + )) + .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))); + } + + @Test + public void findOneWithExcludedBitstreamBundleIT() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .enableIIIF() + .build(); + + String bitstreamContent = "ThisIsSomeText"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream1.jpg") + .withMimeType("image/jpeg") + .withIIIFLabel("Custom Label") + .build(); + } + // Add bitstream + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder + .createBitstream(context, publicItem1, is, "ExcludedBundle", false) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + } + context.restoreAuthSystemState(); + // Expect canvas label, width and height 
to match bitstream description. + getClient().perform(get("/iiif/" + publicItem1.getID() + "/manifest")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) + .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) + .andExpect(jsonPath("$.sequences[0].canvases[0].@id", + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream1.getID().toString() + ) + )) + .andExpect(jsonPath("$.sequences[0].canvases[*].@id", + not( + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream2.getID().toString() + ) + ) + )) + .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))); + } + + @Test public void findOneIIIFSearchableWithCustomBundleAndConfigIT() throws Exception { context.turnOffAuthorisationSystem(); @@ -262,7 +375,7 @@ public void findOneIIIFSearchableWithCustomBundleAndConfigIT() throws Exception .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(3163))) @@ -416,7 +529,7 @@ public void findOneWithStructures() throws Exception { .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Global 1"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(2000))) 
@@ -435,16 +548,16 @@ public void findOneWithStructures() throws Exception { Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-0"))) .andExpect(jsonPath("$.structures[1].label", is("Section 1"))) .andExpect(jsonPath("$.structures[1].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.structures[2].@id", Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-1"))) .andExpect(jsonPath("$.structures[2].label", is("Section 2"))) .andExpect(jsonPath("$.structures[2].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.structures[2].canvases[1]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream3.getID().toString()))) .andExpect(jsonPath("$.service").exists()); } @@ -627,45 +740,45 @@ public void findOneWithHierarchicalStructures() throws Exception { .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(8))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].images[0].resource.@id", - Matchers.containsString(bitstream1.getID().toString()))) + containsString(bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) 
.andExpect(jsonPath("$.sequences[0].canvases[1].images[0].resource.@id", - Matchers.containsString(bitstream2.getID().toString()))) + containsString(bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[2].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream3.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[2].images[0].resource.@id", - Matchers.containsString(bitstream3.getID().toString()))) + containsString(bitstream3.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[3].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream4.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[3].images[0].resource.@id", - Matchers.containsString(bitstream4.getID().toString()))) + containsString(bitstream4.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[4].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream5.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[4].images[0].resource.@id", - Matchers.containsString(bitstream5.getID().toString()))) + containsString(bitstream5.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[5].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream6.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[5].images[0].resource.@id", - Matchers.containsString(bitstream6.getID().toString()))) + containsString(bitstream6.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[6].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + 
bitstream7.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[6].images[0].resource.@id", - Matchers.containsString(bitstream7.getID().toString()))) + containsString(bitstream7.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[7].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream8.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[7].images[0].resource.@id", - Matchers.containsString(bitstream8.getID().toString()))) + containsString(bitstream8.getID().toString()))) .andExpect(jsonPath("$.structures[0].@id", Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0"))) // the toc contains two top sections 1 & 2 without direct children canvases @@ -688,20 +801,20 @@ public void findOneWithHierarchicalStructures() throws Exception { Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-0-1"))) .andExpect(jsonPath("$.structures[1].canvases", Matchers.hasSize(2))) .andExpect(jsonPath("$.structures[1].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.structures[1].canvases[1]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream5.getID().toString()))) // section 1 > a contains bitstream 2 and 3, no sub sections .andExpect(jsonPath("$.structures[2].label", is("a"))) .andExpect(jsonPath("$.structures[2].ranges").doesNotExist()) .andExpect(jsonPath("$.structures[2].canvases", Matchers.hasSize(2))) .andExpect(jsonPath("$.structures[2].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.structures[2].canvases[1]", 
- Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream3.getID().toString()))) // section 1 > b contains only the bitstream 4 and no sub sections .andExpect(jsonPath("$.structures[3].@id", @@ -710,7 +823,7 @@ public void findOneWithHierarchicalStructures() throws Exception { .andExpect(jsonPath("$.structures[3].ranges").doesNotExist()) .andExpect(jsonPath("$.structures[3].canvases", Matchers.hasSize(1))) .andExpect(jsonPath("$.structures[3].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream4.getID().toString()))) // section 2 contains bitstream 6 and 7, sub section "sub 2-1" .andExpect(jsonPath("$.structures[4].label", is("Section 2"))) @@ -719,10 +832,10 @@ public void findOneWithHierarchicalStructures() throws Exception { Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-1-0"))) .andExpect(jsonPath("$.structures[4].canvases", Matchers.hasSize(2))) .andExpect(jsonPath("$.structures[4].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream6.getID().toString()))) .andExpect(jsonPath("$.structures[4].canvases[1]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream7.getID().toString()))) // section 2 > sub 2-1 contains only the bitstream 8 no sub sections .andExpect(jsonPath("$.structures[5].@id", @@ -731,7 +844,7 @@ public void findOneWithHierarchicalStructures() throws Exception { .andExpect(jsonPath("$.structures[5].ranges").doesNotExist()) .andExpect(jsonPath("$.structures[5].canvases", Matchers.hasSize(1))) .andExpect(jsonPath("$.structures[5].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + 
publicItem1.getID() + "/canvas/" + bitstream8.getID().toString()))) .andExpect(jsonPath("$.service").exists()); } @@ -844,7 +957,7 @@ public void findOneIIIFNotSearcheableIT() throws Exception { .andExpect(jsonPath("$.license", is("https://license.org"))) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.service").doesNotExist()); @@ -894,7 +1007,7 @@ public void findOneIIIFWithOtherContentIT() throws Exception { .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.seeAlso.@type", is("sc:AnnotationList"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.service").doesNotExist()); @@ -949,7 +1062,7 @@ public void findOneUsingOriginalBundleIgnoreFileIT() throws Exception { .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.rendering.@id", @@ -1026,13 +1139,13 @@ public void findOneIIIFRestrictedItem() throws Exception { .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", 
is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(3163))) .andExpect(jsonPath("$.sequences[0].canvases[0].height", is(4220))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].label", is("Global 2"))) .andExpect(jsonPath("$.sequences[0].canvases[1].width", is(2000))) @@ -1168,7 +1281,7 @@ public void getAnnotationListSeeAlso() throws Exception { .andExpect(jsonPath("$.resources[0].@type", is("oa:Annotation"))) .andExpect(jsonPath("$.resources[0].motivation", is ("oa:linking"))) .andExpect(jsonPath("$.resources[0].resource.@id", - Matchers.containsString(bitstream2.getID() + "/content"))); + containsString(bitstream2.getID() + "/content"))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index 80ed84453088..48516a840da6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -12,8 +12,10 @@ import static org.dspace.app.rest.matcher.CrisLayoutBoxMatcher.matchBox; import static org.dspace.app.rest.matcher.CrisLayoutTabMatcher.matchRest; import static org.dspace.app.rest.matcher.CrisLayoutTabMatcher.matchTab; +import static 
org.dspace.builder.RelationshipTypeBuilder.createRelationshipTypeBuilder; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -56,6 +58,7 @@ import org.dspace.builder.EntityTypeBuilder; import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -63,11 +66,18 @@ import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; +import org.dspace.content.service.RelationshipService; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; import org.dspace.layout.CrisLayoutBox; import org.dspace.layout.CrisLayoutBoxTypes; import org.dspace.layout.CrisLayoutCell; @@ -104,6 +114,18 @@ public class CrisLayoutTabRestRepositoryIT extends AbstractControllerIntegration @Autowired private CrisLayoutTabService crisLayoutTabService; + @Autowired + private BitstreamService bitstreamService; + + @Autowired + protected EntityTypeService entityTypeService; + + @Autowired + protected RelationshipService relationshipService; + + @Autowired + protected GroupService groupService; + private final String METADATASECURITY_URL = "http://localhost:8080/api/core/metadatafield/"; /** 
@@ -634,6 +656,293 @@ public void findByItem() throws Exception { .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", contains(matchBox(box)))); } + /** + * Test for endpoint /api/layout/tabs/search/findByItem?uuid= + * The tabs are sorted by priority ascending. This are filtered based on the permission of the + * current user and available data. + * The expected result is a list of tabs derived from the item type, where the item type is: + *

        + *
      • submissionName.Authority of metadata configured in property {@code dspace.metadata.layout.tab}
      • + *
      • If null, submissionName.value of that metadata
      • + *
      • if null, Authority of metadata configured in property {@code dspace.metadata.layout.tab}
      • + *
      • If null, value of that metadata
      • + *
      • if null, submission name of item
      • + *
      • If null, value of entity type (metadata {@code dspace.entity.type})
      • + *
      • Otherwise, null
      • + *
      + * @throws Exception + */ + @Test + public void findByItemMetadata() throws Exception { + context.turnOffAuthorisationSystem(); + + // Create new community + Community community = CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + // Create new collection + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Test Collection") + .withSubmissionDefinition("publication") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, community) + .withName("Test Collection two") + .withSubmissionDefinition("traditional") + .build(); + + Collection collectionThree = CollectionBuilder.createCollection(context, community) + .withName("Test Collection two") + .withSubmissionDefinition("patent") + .build(); + + // Create entity Type + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType journalType = EntityTypeBuilder.createEntityTypeBuilder(context, "Journal").build(); + EntityType patentType = EntityTypeBuilder.createEntityTypeBuilder(context, "Patent").build(); + EntityType eTypePer = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + EntityType eTypeCollection = EntityTypeBuilder.createEntityTypeBuilder(context, "Collection").build(); + MetadataSchema schema = mdss.find(context, MetadataSchemaEnum.DC.getName()); + MetadataField title = mfss.findByElement(context, schema, "title", null); + + // Create new items + // first uses metadata type authority and submission as custom filter + String authority = "publication-coar-types:c_2f33"; + String metadataValue = "Resource Types::text::book"; + String submissionNameMetadataValue = "traditional." + metadataValue; + String submissionNameAuthority = "patent." 
+ authority; + + Item itemPublicationAuthority = ItemBuilder.createItem(context, collection) + .withTitle("TITLE") + .withType(metadataValue, authority) + .withEntityType(publicationType.getLabel()) + .build(); + // second uses ametadata type value as custom filter + Item itemPublicationValue = ItemBuilder.createItem(context, collection) + .withTitle("TITLE 1") + .withType(metadataValue) + .withEntityType(publicationType.getLabel()) + .build(); + // third uses entity type value as custom filter + Item itemPublication = ItemBuilder.createItem(context, collection) + .withTitle("TITLE 2") + .withEntityType(publicationType.getLabel()) + .build(); + // fourth uses submission name as custom filter + Item itemPublicationSubmission = ItemBuilder.createItem(context, collection) + .withTitle("TITLE 3") + .withType("type value") + .withEntityType(publicationType.getLabel()) + .build(); + // fifth uses submissionName.metadataValue as custom filter + Item itemPublicationSubmissionMetadata = ItemBuilder.createItem(context, collectionTwo) + .withTitle("TITLE 4") + .withType(metadataValue) + .withEntityType(journalType.getLabel()) + .build(); + + // sixth uses submissionName.authority as custom filter + Item itemPublicationSubmissionAuthority = ItemBuilder.createItem(context, collectionThree) + .withTitle("TITLE 5") + .withType(metadataValue, authority) + .withEntityType(patentType.getLabel()) + .build(); + + + // Create tabs for Publication Entity + CrisLayoutField field = CrisLayoutFieldBuilder.createMetadataField(context, title, 0, 1) + .withLabel("TITLE") + .withRendering("TEXT") + //.withBox(boxOne) + .build(); + CrisLayoutBox boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + CrisLayoutTab tabAuthority = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Publication - 
priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(authority) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + // Test + getClient() + .perform( + get("/api/layout/tabs/search/findByItem") + .param("uuid",itemPublicationAuthority.getID().toString()) + ) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tabAuthority)))); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + CrisLayoutTab tabPublicationValue = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Collection - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(metadataValue) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient() + .perform( + get("/api/layout/tabs/search/findByItem") + .param("uuid",itemPublicationValue.getID().toString()) + ) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tabPublicationValue)))); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + CrisLayoutTab tabPublication = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Person - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + 
.withCustomFilter(null) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem").param("uuid", itemPublication.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabPublication) + ) + ) + ); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + + CrisLayoutTab tabSubmissionName = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Submission - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter("publication") + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", itemPublicationSubmission.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabSubmissionName) + ) + ) + ); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + + CrisLayoutTab tabSubmissionNameMetadata = + CrisLayoutTabBuilder.createTab(context, journalType, 0) + .withShortName("TabOne For Submission metadata value - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(submissionNameMetadataValue) + 
.addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", itemPublicationSubmissionMetadata.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabSubmissionNameMetadata) + ) + ) + ); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, patentType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + + CrisLayoutTab tabSubmissionNameAuthority = + CrisLayoutTabBuilder.createTab(context, patentType, 0) + .withShortName("TabOne For Submission authority - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(submissionNameAuthority) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", itemPublicationSubmissionAuthority.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabSubmissionNameAuthority) + ) + ) + ); + } + /** * Test for the altering which happens at endpoint /api/layout/tabs/search/findByItem?uuid= * The configuration of CrisLayoutBoxRest: boxType=METRICS, is altered by inner joining the CrisLayoutBoxRest @@ -1730,6 +2039,198 @@ public void findByItemTabsWithCustomSecurityLayoutAnonynousTest() throws Excepti .andExpect(jsonPath("$._embedded.tabs[0].rows[1].cells[0].boxes", contains(matchBox(box2)))); } + @Test + public void findByItemTabsWithHiddenRelationshipsTest() throws Exception { + 
context.turnOffAuthorisationSystem(); + + EntityType eType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Mecca", "Vincenzo") + .withEmail("vins@4science.com") + .withPassword(password) + .build(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + + Collection col1 = + CollectionBuilder.createCollection(context, community) + .withName("Test Publications") + .build(); + + Collection people = + CollectionBuilder.createCollection(context, community) + .withName("People") + .withEntityType("Person") + .build(); + + Item firstPerson = + ItemBuilder.createItem(context, people) + .withTitle("4Science, Vins") + .build(); + + // RELATION.Person.researchoutputs + CrisLayoutBoxBuilder.createBuilder(context, eType, CrisLayoutBoxTypes.RELATION.name(), true, true) + .withShortname("box-shortname-one") + .build(); + + CrisLayoutBox box1 = + CrisLayoutBoxBuilder.createBuilder(context, eType, CrisLayoutBoxTypes.RELATION.name(), true, true) + .withShortname("researchoutputs") + .withHeader("Publications") + .withSecurity(LayoutSecurity.PUBLIC) + .withType(CrisLayoutBoxTypes.RELATION.name()) + .build(); + + + CrisLayoutBox box2 = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, true) + .withShortname("box-shortname-two") + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, "dc.title", 0, 0) + .withLabel("LABEL TITLE") + .withRendering("RENDERIGN TITLE") + .withRowStyle("STYLE") + .withBox(box2) + .build(); + + CrisLayoutTab tab = + CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("details") + .withHeader("Profile") + .addBoxIntoNewRow(box2) + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + CrisLayoutTab tab1 = + CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("publications") 
+ .withHeader("Publications") + .addBoxIntoNewRow(box1) + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + String tokenUserA = getAuthToken(userA.getEmail(), password); + getClient(tokenUserA).perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2))) + ) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + // reload the collection as we need to create an additional item in it + col1 = context.reloadEntity(col1); + Item publication1 = + ItemBuilder.createItem(context, col1) + .withTitle("Title Of Item") + .withIssueDate("2015-06-25") + .withAuthor("4Science, Vins", firstPerson.getID().toString()) + .withEntityType("Publication") + .build(); + + context.restoreAuthSystemState(); + + 
getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(2))) + .andExpect(jsonPath("$._embedded.tabs", containsInAnyOrder(matchTab(tab), matchTab(tab1)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", contains(matchBox(box1)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()) + .andExpect(jsonPath("$._embedded.tabs[1].rows[1]").doesNotExist()); + + getClient(tokenUserA).perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(2))) + .andExpect(jsonPath("$._embedded.tabs", containsInAnyOrder(matchTab(tab), matchTab(tab1)))) + .andExpect( + jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2))) + ) + .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", contains(matchBox(box1))) + ) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()) + .andExpect(jsonPath("$._embedded.tabs[1].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + RelationshipType hiddenResearchOutput = + createRelationshipTypeBuilder( + context, null, entityTypeService.findByEntityType(context, "Person"), "isResearchoutputsHiddenFor", + "notDisplayingResearchoutputs", 0, null, 0, null + ).build(); + + 
final Relationship publicationOneHiddenByFirstPerson = + RelationshipBuilder.createRelationshipBuilder( + context, publication1, firstPerson, hiddenResearchOutput + ).build(); + + context.restoreAuthSystemState(); + try { + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + getClient(tokenUserA).perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + } finally { + RelationshipBuilder.deleteRelationship(publicationOneHiddenByFirstPerson.getID()); + } + + } + @Test public void findThumbnailUsingLayoutTabBoxConfiguration() throws Exception { context.turnOffAuthorisationSystem(); @@ -1786,6 +2287,401 @@ public void findThumbnailUsingLayoutTabBoxConfiguration() throws Exception { } + @Test + public void excludeThumbnailNegativeMetadataValueMatcherTabBoxConfiguration() throws Exception { + context.turnOffAuthorisationSystem(); + EntityType eType = + EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + // Setting up configuration for 
dc.type = logo with rendering thumbnail + MetadataField metadataField = + mfss.findByElement(context, "dc", "type", null); + + CrisLayoutBox box = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, false) + .withShortname("researcherprofile") + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + CrisLayoutField field = + CrisLayoutFieldBuilder.createBistreamField(context, metadataField, "ORIGINAL", 0, 0, 0) + .withRendering("thumbnail") + .withBox(box) + .build(); + + // filter out bitstreams with "personal picture" as dc.type + ((CrisLayoutFieldBitstream)field).setMetadataValue("!personal picture"); + + CrisLayoutTab tab = + CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("otherinfo") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("Other") + .addBoxIntoNewRow(box) + .build(); + + Community community = CommunityBuilder.createCommunity(context).build(); + Collection personCollection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, personCollection).withEntityType("Person").build(); + + Bundle original = BundleBuilder.createBundle(context, item).withName("ORIGINAL").build(); + + org.dspace.content.Bitstream bitstream0 = + BitstreamBuilder.createBitstream(context, original, InputStream.nullInputStream()) + .withType("logo") + .build(); + + original.setPrimaryBitstreamID(bitstream0); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box)))) + 
.andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + original = context.reloadEntity(original); + org.dspace.content.Bitstream bitstream1 = + BitstreamBuilder.createBitstream(context, original, InputStream.nullInputStream()) + .withType("personal picture") + .build(); + original.setPrimaryBitstreamID(bitstream1); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + bitstream0 = context.reloadEntity(bitstream0); + + bitstreamService.delete(context, bitstream0); + + context.commit(); + context.restoreAuthSystemState(); + + context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(0))) + .andExpect(jsonPath("$._embedded.tabs").doesNotExist()); + + } + + @Test + public void excludeThumbnailNegativeMetadataValueMatcherTabMultiBoxConfiguration() throws Exception { + context.turnOffAuthorisationSystem(); + EntityType eType = + EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + // Setting up configuration for dc.type = logo with rendering thumbnail + MetadataField dcType = + mfss.findByElement(context, "dc", "type", null); + MetadataField 
dcTitle = + mfss.findByElement(context, "dc", "title", null); + + CrisLayoutBox thumbnailBox = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, false) + .withShortname("researcherprofile") + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + CrisLayoutBox titleBox = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, false) + .withShortname("title") + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + CrisLayoutField thumbnailField = + CrisLayoutFieldBuilder.createBistreamField(context, dcType, "ORIGINAL", 0, 0, 0) + .withRendering("thumbnail") + .withBox(thumbnailBox) + .build(); + + // filter out bitstreams with "personal picture" as dc.type + ((CrisLayoutFieldBitstream)thumbnailField).setMetadataValue("!personal picture"); + + CrisLayoutField titleField = + CrisLayoutFieldBuilder.createMetadataField(context, dcTitle, 0, 0) + .withRendering("heading") + .withBox(titleBox) + .build(); + + CrisLayoutTab tab = + CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("otherinfo") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("Other") + .addBoxIntoNewRow(thumbnailBox) + .addBoxIntoNewRow(titleBox) + .build(); + + Community community = CommunityBuilder.createCommunity(context).build(); + Collection personCollection = CollectionBuilder.createCollection(context, community).build(); + Item item = + ItemBuilder.createItem(context, personCollection) + .withEntityType("Person") + .withTitle("Custom Person") + .build(); + + Bundle original = + BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + org.dspace.content.Bitstream bitstream0 = + BitstreamBuilder.createBitstream(context, original, InputStream.nullInputStream()) + .withType("personal picture") + .build(); + + original.setPrimaryBitstreamID(bitstream0); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", 
item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", + not(contains(matchBox(thumbnailBox), matchBox(titleBox))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(titleBox)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + } + + @Test + public void testFindByItemWithAlternativeTabs() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = mdss.find(context, "person"); + MetadataField firstName = mfss.findByElement(context, schema, "givenName", null); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + // Create new community + Community community = CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + // Create new collection + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Test Collection") + .build(); + // Create entity Type + EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + EntityType eTypePer = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + // Create new person item + Item item = ItemBuilder.createItem(context, collection) + .withPersonIdentifierFirstName("Danilo") + .withPersonIdentifierLastName("Di Nuzzo") + .withEntityType(eTypePer.getLabel()) + .build(); + + CrisLayoutBox boxOne = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .build(); + + CrisLayoutBox boxTwo = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + 
.withShortname("Box shortname 2") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxOne) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxTwo) + .build(); + + // add boxOne to tabOne + CrisLayoutTab tabOne = + CrisLayoutTabBuilder.createTab(context, eTypePer, 0) + .withShortName("TabOne For Person - priority 0") + .withSecurity(LayoutSecurity.ADMINISTRATOR) + .withHeader("New Tab header") + .addBoxIntoNewRow(boxOne, "rowTwoStyle", "cellOfRowTwoStyle") + .build(); + + // add boxTwo to tabTwo + CrisLayoutTab tabTwo = + CrisLayoutTabBuilder.createTab(context, eTypePer, 0) + .withShortName("Tab2 For Person - priority 0") + .withSecurity(LayoutSecurity.CUSTOM_DATA) + .withHeader("New Tab2 header") + .addBoxIntoNewRow(boxTwo, "rowTwoStyle2", "cellOfRowTwoStyle2") + .addTab2SecurityGroups(adminGroup, tabOne) + .build(); + + context.restoreAuthSystemState(); + + // admin user will see two tabs + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(2))) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tabOne.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.ADMINISTRATOR.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[1].id", is(tabTwo.getID()))) + .andExpect(jsonPath("$._embedded.tabs[1].shortname", is("Tab2 For Person - 
priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[1].header", is("New Tab2 header"))) + .andExpect(jsonPath("$._embedded.tabs[1].security", is(LayoutSecurity.CUSTOM_DATA.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[1].rows", hasSize(1))); + + // anonymous user will see only alternative tab is tabOne + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(1))) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tabOne.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.ADMINISTRATOR.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].style", is("rowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].style", is("cellOfRowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(boxOne)))); + } + + @Test + public void testFindByItemWithAlternativeBoxes() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = mdss.find(context, "person"); + MetadataField firstName = mfss.findByElement(context, schema, "givenName", null); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + // Create new community + Community community = CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + // Create new collection + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Test Collection") + .build(); + // Create entity Type + 
EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + EntityType eTypePer = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + // Create new person item + Item item = ItemBuilder.createItem(context, collection) + .withPersonIdentifierFirstName("Danilo") + .withPersonIdentifierLastName("Di Nuzzo") + .withEntityType(eTypePer.getLabel()) + .build(); + + CrisLayoutBox boxOne = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .build(); + + // add boxOne as alternative to boxTwo + CrisLayoutBox boxTwo = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + .withShortname("Box shortname 2") + .withSecurity(LayoutSecurity.CUSTOM_DATA) + .withContainer(false) + .addBox2SecurityGroups(adminGroup, boxOne) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxOne) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxTwo) + .build(); + + // add boxTwo to tab + CrisLayoutTab tab = CrisLayoutTabBuilder.createTab(context, eTypePer, 0) + .withShortName("TabOne For Person - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withLeading(true) + .addBoxIntoNewRow(boxTwo, "rowTwoStyle", "cellOfRowTwoStyle") + .build(); + + context.restoreAuthSystemState(); + + // admin user will see boxTwo + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tab.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + 
.andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].leading", is(true))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.PUBLIC.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].style", is("rowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].style", is("cellOfRowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(boxTwo)))); + + // anonymous user will see boxOne + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tab.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].leading", is(true))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.PUBLIC.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].style", is("rowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].style", is("cellOfRowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(boxOne)))); + } + private CrisLayoutTabRest parseJson(String name) throws Exception { return new ObjectMapper().readValue(getFileInputStream(name), CrisLayoutTabRest.class); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java index f52adc5daa5f..c6ec8db0387b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java @@ -8,6 +8,9 @@ package org.dspace.app.rest.matcher; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -16,7 +19,6 @@ import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; /** * Utility class to construct a Matcher for a browse index @@ -31,7 +33,8 @@ private BrowseIndexMatcher() { } public static Matcher subjectBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.subject.*")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -44,7 +47,8 @@ public static Matcher subjectBrowseIndex(final String order) { public static Matcher titleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + 
hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("title")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -56,7 +60,8 @@ public static Matcher titleBrowseIndex(final String order) { public static Matcher contributorBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -69,7 +74,8 @@ public static Matcher contributorBrowseIndex(final String order) public static Matcher dateIssuedBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.date.issued")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("date")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -81,7 +87,6 @@ public static Matcher dateIssuedBrowseIndex(final String order) public static Matcher rodeptBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("cris.virtual.department")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + 
"discover/browses/rodept")), @@ -92,7 +97,6 @@ public static Matcher rodeptBrowseIndex(final String order) { public static Matcher typeBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.type")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/type")), @@ -103,7 +107,6 @@ public static Matcher typeBrowseIndex(final String order) { public static Matcher rpnameBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/rpname")), @@ -114,7 +117,6 @@ public static Matcher rpnameBrowseIndex(final String order) { public static Matcher rpdeptBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("person.affiliation.name")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/rpdept")), @@ -125,7 +127,6 @@ public static Matcher rpdeptBrowseIndex(final String order) { public static Matcher ounameBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + 
"discover/browses/ouname")), @@ -136,7 +137,6 @@ public static Matcher ounameBrowseIndex(final String order) { public static Matcher pjtitleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/pjtitle")), @@ -147,11 +147,41 @@ public static Matcher pjtitleBrowseIndex(final String order) { public static Matcher eqtitleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/eqtitle")), hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/eqtitle/items")) + ); + } + + public static Matcher typesBrowseIndex() { + return allOf( + hasJsonPath("$.metadata", contains("dc.type")), + hasJsonPath("$.browseType", is("hierarchicalBrowse")), + hasJsonPath("$.facetType", is("itemtype")), + hasJsonPath("$.type", is("browse")), + hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/types")), + hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/types/items")) ); } + + public static Matcher hierarchicalBrowseIndex( + String vocabulary, String facetType, String metadata + ) { + return allOf( + hasJsonPath("$.metadata", contains(metadata)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), + hasJsonPath("$.facetType", equalToIgnoringCase(facetType)), + hasJsonPath("$.vocabulary", 
equalToIgnoringCase(vocabulary)), + hasJsonPath("$._links.vocabulary.href", + is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))), + hasJsonPath("$._links.items.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))), + hasJsonPath("$._links.entries.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))), + hasJsonPath("$._links.self.href", + is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary))) + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index c1223adf1738..29ee72e6c539 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -130,6 +130,17 @@ public static Matcher hasContentInOriginalBundleFacet(boolean ha ); } + public static Matcher matchFacet(boolean hasNext, String name, String facetType) { + return allOf( + hasJsonPath("$.name", is(name)), + hasJsonPath("$.facetType", is(facetType)), + hasJsonPath("$.facetLimit", any(Integer.class)), + hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)), + hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name)) + ); + } + + /** * Check that a facet over the dc.type exists and match the default configuration * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java index 012c7f8f3eeb..9d5ae5d3bcf0 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java @@ -61,6 +61,16 @@ public static Matcher entrySubject(String label, 
int count) { ); } + public static Matcher matchEntry(String facet, String label, int count) { + return allOf( + hasJsonPath("$.label", is(label)), + hasJsonPath("$.type", is("discover")), + hasJsonPath("$.count", is(count)), + hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")), + hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals")) + ); + } + public static Matcher entrySubjectWithAuthority(String label, String authority, int count) { return allOf( diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemAuthorityMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemAuthorityMatcher.java index df20e0f65e1f..27e9ffafc1a4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemAuthorityMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemAuthorityMatcher.java @@ -42,8 +42,9 @@ public static Matcher matchItemAuthorityWithOtherInformations(St hasJsonPath("$.display", is(display)), hasJsonPath("$.value", is(value)), hasJsonPath("$.type", is(type)), - hasJsonPath("$.otherInformation", aMapWithSize(1)), - hasJsonPath("$.otherInformation['" + otherInfMetadata + "']", is(metadataValue)) + hasJsonPath("$.otherInformation", aMapWithSize(2)), + hasJsonPath("$.otherInformation['" + otherInfMetadata + "']", is(metadataValue)), + hasJsonPath("$.otherInformation['" + "data-" + otherInfMetadata + "']", is(metadataValue)) ); } @@ -73,4 +74,18 @@ public static Matcher matchItemAuthorityWithTwoMetadataInOtherIn ) ); } + + public static Matcher matchItemAuthorityWithTwoMetadataInOtherInformations(String authority, + String display, String value, String type, Map orcidAndAffiliation) { + return allOf( + hasJsonPath("$.authority", is(authority)), + hasJsonPath("$.display", is(display)), + hasJsonPath("$.value", is(value)), + hasJsonPath("$.type", is(type)), + hasJsonPath("$.otherInformation", aMapWithSize(4)), + 
allOf ( + hasJsonPath("$.otherInformation", is(orcidAndAffiliation)) + ) + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java new file mode 100644 index 000000000000..24a94a4d4bb7 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CollectionMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunityCollectionLinkRepository} + */ +public class CommunityCollectionLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Collection collection1; + Collection collection2; + Collection collection3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + collection1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + 
.build(); + collection2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + collection3 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getCollections_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection1), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection3) + ))); + } + + @Test + public void getCollections_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection3), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection1) + ))); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java new file mode 100644 index 000000000000..aa3b1c072187 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * 
+ * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CommunityMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunitySubcommunityLinkRepository} + */ +public class CommunitySubcommunityLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Community subCommunity1; + Community subCommunity2; + Community subCommunity3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + subCommunity1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 1") + .build(); + subCommunity2 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 2") + .build(); + subCommunity3 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getSubCommunities_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity1), + 
CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity3) + ))); + } + + @Test + public void getSubCommunities_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity3), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity1) + ))); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java new file mode 100644 index 000000000000..6d1d242cad7f --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -0,0 +1,990 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.controller; + +import static org.dspace.content.MetadataSchemaEnum.PERSON; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.InputStream; +import java.text.DateFormat; +import java.text.MessageFormat; +import 
java.text.SimpleDateFormat; +import java.util.Date; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.builder.WorkspaceItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.RelationshipType; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.authority.Choices; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipTypeService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.util.SimpleMapConverter; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { + + private static final String doiPattern = "https://doi.org/{0}"; + private static final String orcidPattern = "http://orcid.org/{0}"; + private static final String doi = "10.1007/978-3-642-35233-1_18"; + private static final String PERSON_ENTITY_TYPE = "Person"; + + private Collection 
collection; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private MetadataAuthorityService metadataAuthorityService; + + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + + @Autowired + private ItemService itemService; + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private RelationshipTypeService relationshipTypeService; + + @Autowired + private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + context.restoreAuthSystemState(); + } + + @Test + public void findAllItemsLinksets() throws Exception { + getClient().perform(get("/signposting")) + .andExpect(status().isMethodNotAllowed()); + } + + @Test + public void findOneItemJsonLinksets() throws Exception { + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + 
.andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithType() throws Exception { + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + String articleUri = mapConverterDSpaceToSchemaOrgUri.getValue("Article"); + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .withType("Article") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + 
.andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].type", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].type[0].href", + Matchers.hasToString("https://schema.org/AboutPage"))) + .andExpect(jsonPath("$.linkset[0].type[1].href", + Matchers.hasToString(articleUri))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithLicence() throws Exception { + String licenceUrl = "https://exmple.com/licence"; + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata(MetadataSchemaEnum.DC.getName(), "rights", "uri", licenceUrl) + .build(); + context.restoreAuthSystemState(); + + 
getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].type[0].href", + Matchers.hasToString("https://schema.org/AboutPage"))) + .andExpect(jsonPath("$.linkset[0].license[0].href", + Matchers.hasToString(licenceUrl))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithBitstreams() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + String bitstream2Content = "ThisIsSomeAlternativeDummyText"; + String bitstream2MimeType = "application/pdf"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is) + 
.withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstream2Content, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 2") + .withDescription("description") + .withMimeType(bitstream2MimeType) + .build(); + } + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(4))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].item[0].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[0].type", + Matchers.hasToString(bitstream1MimeType))) + .andExpect(jsonPath("$.linkset[0].item[1].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream2.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[1].type", + Matchers.hasToString(bitstream2MimeType))) + .andExpect(jsonPath("$.linkset[0].anchor", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) 
+ .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[1].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[2].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[2].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[2].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[2].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream2.getID() + 
"/download"))) + .andExpect(jsonPath("$.linkset[3].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[3].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[3].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithBitstreamsFromDifferentBundles() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is, Constants.DEFAULT_BUNDLE_NAME) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream2 = BitstreamBuilder.createBitstream(context, item, is, "TEXT") + .withName("Bitstream 2") + .withDescription("description") + .withMimeType("application/pdf") + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream3 = BitstreamBuilder.createBitstream(context, item, is, "THUMBNAIL") + .withName("Bitstream 3") + .withDescription("description") + .withMimeType("application/pdf") + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream4 = BitstreamBuilder.createBitstream(context, item, is, "LICENSE") + .withName("Bitstream 4") + .withDescription("description") + 
.withMimeType("application/pdf") + .build(); + } + + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(3))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].item", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].item[0].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[0].type", + Matchers.hasToString(bitstream1MimeType))) + .andExpect(jsonPath("$.linkset[0].anchor", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].collection[0].type", + Matchers.hasToString("text/html"))) + 
.andExpect(jsonPath("$.linkset[1].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[1].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[2].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemThatIsInWorkspaceJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + itemService.addMetadata(context, workspaceItem.getItem(), "dc", "identifier", "doi", Item.ANY, doi); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + workspaceItem.getItem().getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneWithdrawnItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withdrawn() + .build(); + context.restoreAuthSystemState(); + + 
getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneEmbargoItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withIssueDate("2017-11-18") + .withEmbargoPeriod("2 week") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneRestrictedItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withReaderGroup(internalGroup) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneUnDiscoverableItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneBitstreamJsonLinksets() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream 
bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + bitstream.getID() + "/json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneCollectionJsonLinksets() throws Exception { + getClient().perform(get("/signposting/linksets/" + collection.getID() + "/json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneCommunityJsonLinksets() throws Exception { + getClient().perform(get("/signposting/linksets/" + parentCommunity.getID() + "/json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneItemLsetLinksets() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + String siteAsRelation = "<" + url + "/handle/" + item.getHandle() + "> ; rel=\"cite-as\" ; anchor=\"" + + url + "/entities/publication/" + item.getID() + "\" ,"; + String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + + "/download> ; rel=\"item\" ; " + "type=\"text/plain\" ; 
anchor=\"" + url + "/entities/publication/" + + item.getID() + "\" ,"; + String typeRelation = " ; rel=\"type\" ; anchor=\"" + + url + "/entities/publication/" + item.getID() + "\" ,"; + String linksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "> ; rel=\"linkset\" ; type=\"application/linkset\" ;" + + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String jsonLinksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json> ; rel=\"linkset\" ; type=\"application/linkset+json\" ;" + + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String describedByRelation = "<" + url + "/" + signpostingUrl + "/describedby/" + item.getID() + + "> ; rel=\"describedby\" ;" + " type=\"" + mimeType + "\" ; anchor=\"" + url + + "/entities/publication/" + item.getID() + "\" ,"; + + String bitstreamCollectionLink = "<" + url + "/entities/publication/" + item.getID() + "> ;" + + " rel=\"collection\" ; type=\"text/html\" ; anchor=\"" + url + "/bitstreams/" + + bitstream1.getID() + "/download\""; + String bitstreamLinksetLink = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID() + "> ; " + + "rel=\"linkset\" ; type=\"application/linkset\" ; " + + "anchor=\"" + url + "/bitstreams/" + bitstream1.getID() + "/download\""; + String bitstreamLinksetJsonLink = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID() + "/json> ; " + + "rel=\"linkset\" ; type=\"application/linkset+json\" ; " + + "anchor=\"" + url + "/bitstreams/" + bitstream1.getID() + "/download\""; + + String describesMetadataLink = "<" + url + "/entities/publication/" + item.getID() + "> ; " + + "rel=\"describes\" ; type=\"text/html\" ; " + + "anchor=\"" + url + "/" + signpostingUrl + "/describedby/" + item.getID() + "\""; + + getClient().perform(get("/signposting/linksets/" + item.getID())) + .andExpect(content().string(Matchers.containsString(siteAsRelation))) + 
.andExpect(content().string(Matchers.containsString(itemRelation))) + .andExpect(content().string(Matchers.containsString(typeRelation))) + .andExpect(content().string(Matchers.containsString(linksetRelation))) + .andExpect(content().string(Matchers.containsString(jsonLinksetRelation))) + .andExpect(content().string(Matchers.containsString(describedByRelation))) + .andExpect(content().string(Matchers.containsString(bitstreamCollectionLink))) + .andExpect(content().string(Matchers.containsString(bitstreamLinksetLink))) + .andExpect(content().string(Matchers.containsString(bitstreamLinksetJsonLink))) + .andExpect(content().string(Matchers.containsString(describesMetadataLink))) + .andExpect(header().stringValues("Content-Type", "application/linkset;charset=UTF-8")); + } + + @Test + public void findOneUnDiscoverableItemLsetLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findTypedLinkForItemWithAuthor() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + String orcidValue = "orcidValue"; + + context.turnOffAuthorisationSystem(); + + Collection personCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType(PERSON_ENTITY_TYPE) + .build(); + + Item author = ItemBuilder.createItem(context, personCollection) + .withPersonIdentifierLastName("familyName") + .withPersonIdentifierFirstName("firstName") + .withMetadata(PERSON.getName(), "identifier", "orcid", orcidValue) + .build(); + Item publication = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .withAuthor("John", author.getID().toString(), Choices.CF_ACCEPTED) + 
.build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, publication, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + + EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, PERSON_ENTITY_TYPE).build(); + RelationshipType isAuthorOfPublicationRelationshipType = + RelationshipTypeBuilder.createRelationshipTypeBuilder(context, publicationEntityType, authorEntityType, + "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null).build(); + isAuthorOfPublicationRelationshipType.setTilted(RelationshipType.Tilted.LEFT); + isAuthorOfPublicationRelationshipType = + relationshipTypeService.create(context, isAuthorOfPublicationRelationshipType); + RelationshipBuilder.createRelationshipBuilder(context, publication, author, + isAuthorOfPublicationRelationshipType).build(); + + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + String dcIdentifierUriMetadataValue = itemService + .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); + + getClient().perform(get("/signposting/links/" + publication.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(7))) + .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + + "&& @.rel == 'author')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/describedby/" + + publication.getID() + "' " + + "&& @.rel == 'describedby' " + + "&& @.type == '" + mimeType + "')]").exists()) + 
.andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + + "&& @.rel == 'cite-as')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/bitstreams/" + bitstream.getID() + "/download' " + + "&& @.rel == 'item' " + + "&& @.type == 'text/plain')]").exists()) + .andExpect(jsonPath("$[?(@.href == 'https://schema.org/AboutPage' " + + "&& @.rel == 'type')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + + publication.getID().toString() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + + publication.getID().toString() + "/json' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); + } + + @Test + public void findTypedLinkForBitstream() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(3))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' 
" + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamWithType() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + bitstreamService.addMetadata(context, bitstream, "dc", "type", null, Item.ANY, "Article"); + + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(4))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()) + 
.andExpect(jsonPath("$[?(@.href == 'https://schema.org/ScholarlyArticle' " + + "&& @.rel == 'type')]").exists()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForRestrictedBitstream() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .withReaderGroup(internalGroup) + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamUnderEmbargo() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withIssueDate("2017-10-17") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + 
.withDescription("description") + .withMimeType(bitstreamMimeType) + .withEmbargoPeriod("6 months") + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamOfWorkspaceItem() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + Item item = workspaceItem.getItem(); + itemService.addMetadata(context, item, "dc", "identifier", "doi", Item.ANY, doi); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, workspaceItem.getItem(), is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForUnDiscoverableItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + item.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + 
metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void getDescribedBy() throws Exception { + context.turnOffAuthorisationSystem(); + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + String currentDateInFormat = dateFormat.format(new Date()); + String title = "Item Test"; + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "identifier", "doi", doi) + .build(); + String responseMimeType = "application/vnd.datacite.datacite+xml"; + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isOk()) + .andExpect(content().string(Matchers.containsString(title))) + .andExpect(header().stringValues("Content-Type", responseMimeType + ";charset=UTF-8")); + } + + @Test + public void getDescribedByItemThatIsInWorkspace() throws Exception { + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + itemService.addMetadata(context, workspaceItem.getItem(), "dc", "identifier", "doi", Item.ANY, doi); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + workspaceItem.getItem().getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByWithdrawnItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withdrawn() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByEmbargoItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn 
Item") + .withMetadata("dc", "identifier", "doi", doi) + .withIssueDate("2017-11-18") + .withEmbargoPeriod("2 week") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByRestrictedItem() throws Exception { + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withReaderGroup(internalGroup) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByUnDiscoverableItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java index 00339ba2e482..4ec66fb00081 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; -import org.dspace.app.rest.Application; +import org.dspace.app.TestApplication; 
import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; import org.dspace.app.rest.utils.DSpaceKernelInitializer; @@ -68,7 +68,7 @@ // Specify main class to use to load Spring ApplicationContext // NOTE: By default, Spring caches and reuses ApplicationContext for each integration test (to speed up tests) // See: https://docs.spring.io/spring/docs/current/spring-framework-reference/testing.html#integration-testing -@SpringBootTest(classes = Application.class) +@SpringBootTest(classes = TestApplication.class) // Load DSpace initializers in Spring ApplicationContext (to initialize DSpace Kernel & Configuration) @ContextConfiguration(initializers = { DSpaceKernelInitializer.class, DSpaceConfigurationInitializer.class }) // Tell Spring to make ApplicationContext an instance of WebApplicationContext (for web-based tests) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java index 6556624c6b11..be0a27b4ebd1 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java @@ -9,7 +9,7 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; -import org.dspace.app.rest.Application; +import org.dspace.app.TestApplication; import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; import org.dspace.app.rest.utils.DSpaceKernelInitializer; import org.junit.runner.RunWith; @@ -46,7 +46,7 @@ // ALSO tell Spring to start a web server on a random port // NOTE: By default, Spring caches and reuses ApplicationContext for each integration test (to speed up tests) // See: https://docs.spring.io/spring/docs/current/spring-framework-reference/testing.html#integration-testing 
-@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@SpringBootTest(classes = TestApplication.class, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) // Load DSpace initializers in Spring ApplicationContext (to initialize DSpace Kernel & Configuration) @ContextConfiguration(initializers = { DSpaceKernelInitializer.class, DSpaceConfigurationInitializer.class }) // Load our src/test/resources/application-test.properties to override some settings in default application.properties diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 6c9544d2f927..e21f395f0907 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -115,6 +115,8 @@ public void setUp() throws Exception { sortConfiguration.setSortFields(listSortField); + sortConfiguration.setDefaultSortField(defaultSort); + discoveryConfiguration.setSearchSortConfiguration(sortConfiguration); DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet(); @@ -167,6 +169,16 @@ public void testSortByScore() throws Exception { page.getOffset(), "SCORE", "ASC"); } + @Test + public void testSortByDefaultSortField() throws Exception { + page = PageRequest.of(2, 10); + restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); + + verify(discoverQueryBuilder, times(1)) + .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), + page.getPageSize(), page.getOffset(), null, null); + } + @Test(expected = DSpaceBadRequestException.class) public void testCatchIllegalArgumentException() throws Exception { when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), 
anyList(), any(), any(), any(), diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java index 27c37f1487e4..ccb7d43a2378 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java @@ -11,7 +11,6 @@ import org.apache.commons.cli.Options; import org.dspace.app.rest.converter.ScriptConverter; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -28,10 +27,6 @@ public void setDspaceRunnableClass(final Class dspaceRunnableClass) { } - public boolean isAllowedToExecute(final Context context) { - return true; - } - public Options getOptions() { Options options = new Options(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java b/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java index 0cceb70bb218..2d887caa7b2c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java @@ -22,6 +22,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.matches; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; @@ -56,6 +57,8 @@ import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.WorkspaceItem; +import org.dspace.content.authority.ChoiceAuthorityServiceImpl; +import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.service.ItemService; import 
org.dspace.eperson.EPerson; import org.dspace.external.OrcidRestConnector; @@ -89,10 +92,13 @@ public class CrisConsumerIT extends AbstractControllerIntegrationTest { @Autowired private ConfigurationService configurationService; + @Autowired + private ChoiceAuthorityServiceImpl choiceAuthorityService; + @Value("classpath:org/dspace/app/rest/simple-article.pdf") private Resource simpleArticle; - @Value("classpath:org/dspace/authority/orcid/orcid-person-record.xml") + @Value("classpath:org/dspace/authority/orcid/orcid-record.xml") private Resource orcidPersonRecord; private EPerson submitter; @@ -110,6 +116,9 @@ public class CrisConsumerIT extends AbstractControllerIntegrationTest { @Autowired private OrcidV3AuthorDataProvider orcidV3AuthorDataProvider; + @Autowired + private MetadataAuthorityService metadataAuthorityService; + @Override public void setUp() throws Exception { super.setUp(); @@ -1058,7 +1067,7 @@ public void testOrcidImportFiller() throws Exception { String orcid = "0000-0002-9029-1854"; - when(mockOrcidConnector.get(eq(orcid + "/person"), any())) + when(mockOrcidConnector.get(matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), any())) .thenAnswer(i -> orcidPersonRecord.getInputStream()); try { @@ -1076,7 +1085,7 @@ public void testOrcidImportFiller() throws Exception { context.restoreAuthSystemState(); - verify(mockOrcidConnector).get(eq(orcid + "/person"), any()); + verify(mockOrcidConnector).get(eq(orcid), any()); verifyNoMoreInteractions(mockOrcidConnector); String authToken = getAuthToken(submitter.getEmail(), password); @@ -1126,54 +1135,74 @@ public void testOrcidImportFiller() throws Exception { @Test public void testSherpaImportFiller() throws Exception { - String issn = "2731-0582"; + try { + configurationService.setProperty("authority.controlled.dc.relation.journal", "true"); + configurationService.setProperty("choices.plugin.dc.relation.journal", "JournalAuthority"); + configurationService.setProperty("choices.presentation.dc.relation.journal", 
"suggest"); + configurationService.setProperty("choices.closed.dc.relation.journal", "true"); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.entityType", "Journal"); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.relationshipType", "Journal"); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); - context.turnOffAuthorisationSystem(); + String issn = "2731-0582"; - Collection journals = createCollection("Collection of journals", "Journal", subCommunity); + context.turnOffAuthorisationSystem(); - Item publication = ItemBuilder.createItem(context, publicationCollection) - .withTitle("Test Publication") - .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) - .build(); + Collection journals = createCollection("Collection of journals", "Journal", subCommunity); - context.commit(); + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test Publication") + .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) + .build(); - context.restoreAuthSystemState(); + context.commit(); - String authToken = getAuthToken(submitter.getEmail(), password); - ItemRest item = getItemViaRestByID(authToken, publication.getID()); + context.restoreAuthSystemState(); - MetadataValueRest journalMetadata = findSingleMetadata(item, "dc.relation.journal"); + String authToken = getAuthToken(submitter.getEmail(), password); + ItemRest item = getItemViaRestByID(authToken, publication.getID()); - UUID journalId = UUIDUtils.fromString(journalMetadata.getAuthority()); - assertThat(journalId, notNullValue()); + MetadataValueRest journalMetadata = findSingleMetadata(item, "dc.relation.journal"); - Item journal = itemService.find(context, journalId); - assertThat(journal, notNullValue()); - assertThat(journal.getOwningCollection(), is(journals)); - assertThat(journal.getMetadata(), hasItems( - with("dc.title", "Nature Synthesis"), - 
with("dc.identifier.issn", issn), - with("cris.sourceId", "ISSN::" + issn))); + UUID journalId = UUIDUtils.fromString(journalMetadata.getAuthority()); + assertThat(journalId, notNullValue()); - context.turnOffAuthorisationSystem(); + Item journal = itemService.find(context, journalId); + assertThat(journal, notNullValue()); + assertThat(journal.getOwningCollection(), is(journals)); + assertThat(journal.getMetadata(), hasItems( + with("dc.title", "Nature Synthesis"), + with("dc.identifier.issn", issn), + with("cris.sourceId", "ISSN::" + issn))); - publicationCollection = context.reloadEntity(publicationCollection); + context.turnOffAuthorisationSystem(); - Item anotherPublication = ItemBuilder.createItem(context, publicationCollection) - .withTitle("Test Publication 2") - .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) - .build(); + publicationCollection = context.reloadEntity(publicationCollection); - context.commit(); + Item anotherPublication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test Publication 2") + .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) + .build(); - context.restoreAuthSystemState(); + context.commit(); + + context.restoreAuthSystemState(); - item = getItemViaRestByID(authToken, anotherPublication.getID()); - journalMetadata = findSingleMetadata(item, "dc.relation.journal"); - assertThat(UUIDUtils.fromString(journalMetadata.getAuthority()), is(journal.getID())); + item = getItemViaRestByID(authToken, anotherPublication.getID()); + journalMetadata = findSingleMetadata(item, "dc.relation.journal"); + assertThat(UUIDUtils.fromString(journalMetadata.getAuthority()), is(journal.getID())); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.journal", "false"); + configurationService.setProperty("choices.plugin.dc.relation.journal", null); + configurationService.setProperty("choices.presentation.dc.relation.journal", null); + 
configurationService.setProperty("choices.closed.dc.relation.journal", null); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.entityType", null); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.relationshipType", null); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } } private ItemRest getItemViaRestByID(String authToken, UUID id) throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java index a528f4351356..37b54f481235 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java @@ -14,6 +14,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.File; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.concurrent.atomic.AtomicReference; @@ -29,13 +30,19 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ProcessBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; +import org.dspace.content.Site; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.eperson.EPerson; import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -49,6 +56,9 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest { @Autowired private 
DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; + @Autowired + private ScriptService scriptService; + private final static String SCRIPTS_ENDPOINT = "/api/" + ScriptRest.CATEGORY + "/" + ScriptRest.PLURAL_NAME; private final static String CURATE_SCRIPT_ENDPOINT = SCRIPTS_ENDPOINT + "/curate/" + ProcessRest.PLURAL_NAME; @@ -77,6 +87,7 @@ public void curateScript_invalidTaskOption() throws Exception { parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); parameters.add(new DSpaceCommandLineParameter("-t", "invalidTaskOption")); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); List list = parameters.stream() .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter @@ -100,6 +111,7 @@ public void curateScript_MissingHandle() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); List list = parameters.stream() .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter @@ -121,6 +133,7 @@ public void curateScript_invalidHandle() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", "invalidhandle")); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); List list = parameters.stream() @@ -160,6 +173,7 @@ public void curateScript_MissingTaskOrTaskFile() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); List list = parameters.stream() .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter @@ -205,6 +219,7 @@ public void curateScript_InvalidTaskFile() throws Exception { LinkedList parameters 
= new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", "all")); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-T", "invalidTaskFile")); List list = parameters.stream() @@ -245,6 +260,7 @@ public void curateScript_validRequest_Task() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); List list = parameters.stream() @@ -296,6 +312,7 @@ public void curateScript_validRequest_TaskFile() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-T", taskFile.getAbsolutePath())); List list = parameters.stream() @@ -346,6 +363,7 @@ public void curateScript_EPersonInParametersFails() throws Exception { parameters.add(new DSpaceCommandLineParameter("-e", eperson.getEmail())); parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); List list = parameters.stream() @@ -371,6 +389,263 @@ public void curateScript_EPersonInParametersFails() throws Exception { } } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins can only run the curate script on their own objects + */ + @Test + public void securityCurateTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") 
+ .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Community anotherCommunity = CommunityBuilder.createCommunity(context) + .withName("Another Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Collection anotherCollection = CollectionBuilder.createCollection(context, anotherCommunity) + .withName("AnotherCollection") + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + Item anotherItem = ItemBuilder.createItem(context, anotherCollection) + .withTitle("Another Test item to curate").build(); + Site site = ContentServiceFactory.getInstance().getSiteService().findSite(context); + context.restoreAuthSystemState(); + LinkedList siteParameters = new LinkedList<>(); + siteParameters.add(new DSpaceCommandLineParameter("-i", site.getHandle())); + siteParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList comParameters = new LinkedList<>(); + comParameters.add(new DSpaceCommandLineParameter("-i", community.getHandle())); + comParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherComParameters = new LinkedList<>(); + anotherComParameters.add(new DSpaceCommandLineParameter("-i", anotherCommunity.getHandle())); + anotherComParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList colParameters = new LinkedList<>(); + colParameters.add(new DSpaceCommandLineParameter("-i", collection.getHandle())); + colParameters.add(new 
DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherColParameters = new LinkedList<>(); + anotherColParameters.add(new DSpaceCommandLineParameter("-i", anotherCollection.getHandle())); + anotherColParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList itemParameters = new LinkedList<>(); + itemParameters.add(new DSpaceCommandLineParameter("-i", item.getHandle())); + itemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherItemParameters = new LinkedList<>(); + anotherItemParameters.add(new DSpaceCommandLineParameter("-i", anotherItem.getHandle())); + anotherItemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + + List listCurateSite = siteParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCom = comParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCom = anotherComParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCol = colParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCol = anotherColParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listItem = itemParameters.stream() 
+ .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherItem = anotherItemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + String adminToken = getAuthToken(admin.getEmail(), password); + List acceptableProcessStatuses = new LinkedList<>(); + acceptableProcessStatuses.addAll(Arrays.asList(ProcessStatus.SCHEDULED, + ProcessStatus.RUNNING, + ProcessStatus.COMPLETED)); + + AtomicReference idSiteRef = new AtomicReference<>(); + AtomicReference idComRef = new AtomicReference<>(); + AtomicReference idComColRef = new AtomicReference<>(); + AtomicReference idComItemRef = new AtomicReference<>(); + AtomicReference idColRef = new AtomicReference<>(); + AtomicReference idColItemRef = new AtomicReference<>(); + AtomicReference idItemRef = new AtomicReference<>(); + + ScriptConfiguration curateScriptConfiguration = scriptService.getScriptConfiguration("curate"); + // we should be able to start the curate script with all our admins on the respective dso + try { + // start a process as general admin + getClient(adminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(admin.getID()), + siteParameters, + acceptableProcessStatuses)))) + .andDo(result -> idSiteRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // check with the com admin + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + 
.andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + comParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be able to run the curate also over the children collection and item + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be NOT able to run the curate over other com, col or items + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCom))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + 
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the col admin + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the col admin should be able to run the curate also over the owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // the col admin should be NOT able to run the curate over the community nor another collection nor + // on a not owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + 
curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the item admin + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(itemAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the item admin should be NOT able to run the curate over the community nor the collection nor + // on a not owned item + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + 
.andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + } finally { + ProcessBuilder.deleteProcess(idSiteRef.get()); + ProcessBuilder.deleteProcess(idComRef.get()); + ProcessBuilder.deleteProcess(idComColRef.get()); + ProcessBuilder.deleteProcess(idComItemRef.get()); + ProcessBuilder.deleteProcess(idColRef.get()); + ProcessBuilder.deleteProcess(idColItemRef.get()); + ProcessBuilder.deleteProcess(idItemRef.get()); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java b/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java index 894b8e409a4f..0a0b4f062d31 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java +++ b/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java @@ -85,6 +85,7 @@ public void init() throws IOException { externalDataObject.setDisplayValue(id); List list = new LinkedList<>(); list.add(new MetadataValueDTO("dc", "contributor", "author", null, "Donald, Smith")); + list.add(new MetadataValueDTO("dc", "identifier", "doi", null, "10.1016/j.procs.2017.03.031")); externalDataObject.setMetadata(list); mockLookupMap.put(id, externalDataObject); diff --git a/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java b/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java index 866d0fafedb3..17df839ebf1f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java @@ -29,12 +29,16 @@ import java.util.List; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; 
+import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Item; +import org.dspace.core.Constants; import org.dspace.google.client.GoogleAnalyticsClient; import org.dspace.services.ConfigurationService; import org.junit.After; @@ -61,6 +65,8 @@ public class GoogleAsyncEventListenerIT extends AbstractControllerIntegrationTes private Bitstream bitstream; + private Item item; + private List originalGoogleAnalyticsClients; private GoogleAnalyticsClient firstGaClientMock = mock(GoogleAnalyticsClient.class); @@ -80,7 +86,7 @@ public void setup() throws Exception { .withName("Test collection") .build(); - Item item = ItemBuilder.createItem(context, collection) + item = ItemBuilder.createItem(context, collection) .withTitle("Test item") .build(); @@ -238,6 +244,104 @@ public void testOnBitstreamContentDownloadWithTooManyEvents() throws Exception { } + @Test + public void testOnBitstreamContentDownloadDefaultBundleConfig() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle licenseBundle = BundleBuilder.createBundle(context, item) + .withName(Constants.LICENSE_BUNDLE_NAME).build(); + Bitstream license = BitstreamBuilder.createBitstream(context, licenseBundle, + toInputStream("License", defaultCharset())).build(); + context.restoreAuthSystemState(); + + assertThat(getStoredEventsAsList(), empty()); + + String bitstreamUrl = "/api/core/bitstreams/" + bitstream.getID() + "/content"; + + downloadBitstreamContent("Postman", "123456", "REF"); + downloadContent("Chrome", "ABCDEFG", "REF-1", license); + + assertThat(getStoredEventsAsList(), hasSize(1)); + + List storedEvents = getStoredEventsAsList(); + + assertThat(storedEvents, contains( + event("123456", "127.0.0.1", 
"Postman", "REF", bitstreamUrl, "Test item")) + ); + + googleAsyncEventListener.sendCollectedEvents(); + + assertThat(getStoredEventsAsList(), empty()); + + verify(firstGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).sendEvents(ANALYTICS_KEY, storedEvents); + verifyNoMoreInteractions(firstGaClientMock, secondGaClientMock); + } + + @Test + public void testOnBitstreamContentDownloadMultipleBundleConfig() throws Exception { + configurationService.setProperty("google-analytics.bundles", + List.of(Constants.DEFAULT_BUNDLE_NAME, "CONTENT")); + + context.turnOffAuthorisationSystem(); + Bundle contentBundle = BundleBuilder.createBundle(context, item).withName("CONTENT").build(); + Bitstream content = BitstreamBuilder.createBitstream(context, contentBundle, + toInputStream("Test Content", defaultCharset())).build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item).withName("THUMBNAIL").build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, + toInputStream("Thumbnail", defaultCharset())).build(); + context.restoreAuthSystemState(); + + assertThat(getStoredEventsAsList(), empty()); + + String bitstreamUrl = "/api/core/bitstreams/" + bitstream.getID() + "/content"; + String contentUrl = "/api/core/bitstreams/" + content.getID() + "/content"; + + downloadBitstreamContent("Postman", "123456", "REF"); + downloadContent("Chrome", "ABCDEFG", "REF-1", content); + downloadContent("Chrome", "987654", "REF-2", thumbnail); + + assertThat(getStoredEventsAsList(), hasSize(2)); + + List storedEvents = getStoredEventsAsList(); + + assertThat(storedEvents, contains( + event("123456", "127.0.0.1", "Postman", "REF", bitstreamUrl, "Test item"), + event("ABCDEFG", "127.0.0.1", "Chrome", "REF-1", contentUrl, "Test item") + )); + + googleAsyncEventListener.sendCollectedEvents(); + + assertThat(getStoredEventsAsList(), empty()); + + 
verify(firstGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).sendEvents(ANALYTICS_KEY, storedEvents); + verifyNoMoreInteractions(firstGaClientMock, secondGaClientMock); + } + + @Test + public void testOnBitstreamContentDownloadNoneBundleConfig() throws Exception { + configurationService.setProperty("google-analytics.bundles", "none"); + + context.turnOffAuthorisationSystem(); + Bundle contentBundle = BundleBuilder.createBundle(context, item).withName("CONTENT").build(); + Bitstream content = BitstreamBuilder.createBitstream(context, contentBundle, + toInputStream("Test Content", defaultCharset())).build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item).withName("THUMBNAIL").build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, + toInputStream("Thumbnail", defaultCharset())).build(); + context.restoreAuthSystemState(); + + assertThat(getStoredEventsAsList(), empty()); + + downloadBitstreamContent("Postman", "123456", "REF"); + downloadContent("Chrome", "ABCDEFG", "REF-1", content); + downloadContent("Chrome", "987654", "REF-2", thumbnail); + + assertThat(getStoredEventsAsList(), empty()); + } + @SuppressWarnings("unchecked") private List getStoredEventsAsList() { List events = new ArrayList<>(); @@ -248,13 +352,18 @@ private List getStoredEventsAsList() { return events; } - private void downloadBitstreamContent(String userAgent, String correlationId, String referrer) throws Exception { + private void downloadContent(String userAgent, String correlationId, String referrer, Bitstream bit) + throws Exception { getClient(getAuthToken(admin.getEmail(), password)) - .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content") - .header("USER-AGENT", userAgent) - .header("X-CORRELATION-ID", correlationId) - .header("X-REFERRER", referrer)) + .perform(get("/api/core/bitstreams/" + bit.getID() + 
"/content") + .header("USER-AGENT", userAgent) + .header("X-CORRELATION-ID", correlationId) + .header("X-REFERRER", referrer)) .andExpect(status().isOk()); } + private void downloadBitstreamContent(String userAgent, String correlationId, String referrer) throws Exception { + downloadContent(userAgent, correlationId, referrer, bitstream); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af762..632b4e2f83f4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-server-webapp/src/test/resources/application-test.properties 
b/dspace-server-webapp/src/test/resources/application-test.properties index 9a396cf8e5b1..bd9e2ea4a17b 100644 --- a/dspace-server-webapp/src/test/resources/application-test.properties +++ b/dspace-server-webapp/src/test/resources/application-test.properties @@ -14,4 +14,7 @@ ## Log4j2 configuration for test environment ## This file is found on classpath at src/test/resources/log4j2-test.xml -logging.config = classpath:log4j2-test.xml \ No newline at end of file +logging.config = classpath:log4j2-test.xml + +# Our integration tests expect application to be deployed at the root path (/) +server.servlet.context-path=/ \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/orcid-record.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/orcid-record.xml new file mode 100644 index 000000000000..7672e980c8bd --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/orcid-record.xml @@ -0,0 +1,270 @@ + + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + + en + + + Direct + 2023-09-19T12:25:43.445Z + 2023-10-12T14:19:06.983Z + true + true + true + + + 2023-10-12T13:28:14.550Z + + 2023-09-19T12:25:43.736Z + 2023-09-19T12:25:43.736Z + Andrea + Bollini + + + 2023-10-12T13:28:14.550Z + + 2023-10-05T07:56:29.001Z + 2023-10-12T13:28:14.550Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + isco + + + 2023-10-12T13:28:14.541Z + 2023-10-12T13:28:14.541Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Bollini, Andrea + + + + 2023-10-12T13:27:57.187Z + + 2023-10-12T10:35:14.406Z + 2023-10-12T13:27:57.187Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Linkedin + https://it.linkedin.com/in/andreabollini + + + 2023-10-12T13:27:57.183Z + 
2023-10-12T13:27:57.183Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + 4science + https://www.4science.it/ + + + + 2023-10-12T10:38:48.105Z + + 2023-10-12T10:33:21.077Z + 2023-10-12T10:38:48.105Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + test-orcid@mailinator.com + + + + + + 2023-09-04T09:04:52.121Z + + 2023-01-13T11:20:13.803Z + 2023-01-13T11:48:02.979Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 57432999200 + http://www.scopus.com/inward/authorDetails.url?authorID=57432999200&partnerID=MN8TOARS + self + + + 2023-01-19T14:25:14.512Z + 2023-01-19T14:25:14.512Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 35233141600 + http://www.scopus.com/inward/authorDetails.url?authorID=35233141600&partnerID=MN8TOARS + self + + + + + 2023-10-12T14:19:06.992Z + + + + 2023-10-12T10:52:26.965Z + + 2023-10-12T10:52:26.965Z + + + 2023-10-12T10:52:26.965Z + 2023-10-12T10:52:26.965Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Dspace + + Milan + IT + + + + + + 2023-10-12T10:35:49.079Z + + + 2023-10-12T10:34:17.514Z + 2023-10-12T10:35:49.079Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + 4Science + + Milan + IT + + + https://ror.org/03vb2cr34 + ROR + + + + + + + + + + + + + + 2023-10-12T14:19:06.992Z + + 2023-10-12T14:19:06.992Z + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + eid + 55484808800 + 55484808800 + self + + + + 2023-10-12T14:09:25.415Z + 2023-10-12T14:19:06.992Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 
0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Publication Metadata in CERIF: Inspiration by FRBR + + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + issn + 1877-0509 + 1877-0509 + https://portal.issn.org/resource/ISSN/1877-0509 + part-of + + + eid + 55484808800 + 55484808800 + self + + + http://dx.doi.org/10.1016/j.procs.2014.06.008 + journal-article + + 2014 + + Procedia Computer Science + + + + + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml new file mode 100644 index 000000000000..4f921658e32b --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b5ab65e3b2b2253cd3a + + 36708638 + + + "10 1016 j nepr 2023 103548"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml new file mode 100644 index 000000000000..1ff9570777a7 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b12ccf058150336d6a8 + + 21975942 + + + "10 1002 0471142905 hg0610s71"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml new file mode 100644 index 000000000000..666fb1e7d550 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml @@ -0,0 +1,194 @@ + + + + + + 36708638 + + 2023 + 02 + 23 + + + 2023 + 02 + 23 + +
      + + 1873-5223 + + 67 + + 2023 + Feb + + + Nurse education in practice + Nurse Educ Pract + + Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review. + + 103548 + 103548 + + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing. + Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice. + A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established. + The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text. + 1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions. + Among the different strategies, the simulations are the most used. 
Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed. + Copyright © 2023 Elsevier Ltd. All rights reserved. + + + + Giuffrida + Silvia + S + + Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch. + + + + Silano + Verdiana + V + + Nursing Direction of Settore Anziani Città di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it. + + + + Ramacciati + Nicola + N + + Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it. + + + + Prandi + Cesarina + C + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch. + + + + Baldon + Alessia + A + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch. + + + + Bianchi + Monica + M + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch. + + + + eng + + Journal Article + Review + + + 2023 + 01 + 17 + +
      + + Scotland + Nurse Educ Pract + 101090848 + 1471-5953 + + IM + + + Humans + + + Advanced Practice Nursing + + + Learning + + + Curriculum + + + Thinking + + + Clinical Reasoning + + + Students, Nursing + + + + Advanced practice nursing + Clinical reasoning + Critical thinking + Educational strategies + Nursing education + Teaching methodology + + Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper. +
      + + + + 2022 + 11 + 9 + + + 2022 + 12 + 17 + + + 2023 + 1 + 10 + + + 2023 + 1 + 29 + 6 + 0 + + + 2023 + 2 + 25 + 6 + 0 + + + 2023 + 1 + 28 + 18 + 7 + + + ppublish + + 36708638 + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + +
      +
      \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml new file mode 100644 index 000000000000..949d3b1250b2 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml @@ -0,0 +1,132 @@ + + + + + + 21975942 + + 2012 + 01 + 13 + + + 2016 + 10 + 21 + +
      + + 1934-8258 + + Chapter 6 + + 2011 + Oct + + + Current protocols in human genetics + Curr Protoc Hum Genet + + Searching NCBI Databases Using Entrez. + + Unit6.10 + Unit6.10 + + 10.1002/0471142905.hg0610s71 + + One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed. + © 2011 by John Wiley & Sons, Inc. + + + + Gibney + Gretchen + G + + + Baxevanis + Andreas D + AD + + + eng + + Journal Article + +
      + + United States + Curr Protoc Hum Genet + 101287858 + 1934-8258 + + IM + + + Animals + + + Database Management Systems + + + Databases, Factual + + + Humans + + + Information Storage and Retrieval + methods + + + Internet + + + Molecular Conformation + + + National Library of Medicine (U.S.) + + + PubMed + + + United States + + + User-Computer Interface + + +
      + + + + 2011 + 10 + 7 + 6 + 0 + + + 2011 + 10 + 7 + 6 + 0 + + + 2012 + 1 + 14 + 6 + 0 + + + ppublish + + 21975942 + 10.1002/0471142905.hg0610s71 + + +
      +
      \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-record.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-record.json new file mode 100644 index 000000000000..51924485b347 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-record.json @@ -0,0 +1,107 @@ +{ + "id": "https://ror.org/01sps7q28", + "name": "The University of Texas Health Science Center at Tyler", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1947, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 32.426014, + "lng": -95.212728, + "state": "Texas", + "state_code": "US-TX", + "city": "Tyler", + "geonames_city": { + "id": 4738214, + "city": "Tyler", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Smith County", + "id": 4729130, + "ascii_name": "Smith County", + "code": "US.TX.423" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "https://www.utsystem.edu/institutions/university-texas-health-science-center-tyler" + ], + "aliases": [ + "East Texas Tuberculosis Sanitarium", + "UT Health Northeast" + ], + "acronyms": [ + "UTHSCT" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Health_Science_Center_at_Tyler", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + 
"external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9704 5790" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "3446655" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q7896437" + ] + }, + "GRID": { + "preferred": "grid.267310.1", + "all": "grid.267310.1" + } + } +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-records.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-records.json new file mode 100644 index 000000000000..91ce8d33e084 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-records.json @@ -0,0 +1,2383 @@ +{ + "number_of_results": 200, + "time_taken": 12, + "items": [ + { + "id": "https://ror.org/02f6dcw23", + "name": "The University of Texas", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1959, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "Audie L. Murphy Memorial VA Hospital", + "type": "Related", + "id": "https://ror.org/035xhk118" + }, + { + "label": "San Antonio Military Medical Center", + "type": "Related", + "id": "https://ror.org/00m1mwc36" + }, + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 29.508129, + "lng": -98.574025, + "state": "Texas", + "state_code": "US-TX", + "city": "San Antonio", + "geonames_city": { + "id": 4726206, + "city": "San Antonio", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Bexar County", + "id": 4674023, + "ascii_name": "Bexar County", + "code": "US.TX.029" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { 
+ "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uthscsa.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UTHSCSA" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Health_Science_Center_at_San_Antonio", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 0629 5880" + ] + }, + "FundRef": { + "preferred": "100008635", + "all": [ + "100008635", + "100008636" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1593427" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q4005868" + ] + }, + "GRID": { + "preferred": "grid.267309.9", + "all": "grid.267309.9" + } + } + }, + { + "id": "https://ror.org/01sps7q28", + "name": "The University of Texas Health Science Center at Tyler", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1947, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 32.426014, + "lng": -95.212728, + "state": "Texas", + "state_code": "US-TX", + "city": "Tyler", + "geonames_city": { + "id": 4738214, + "city": "Tyler", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Smith County", + "id": 4729130, + "ascii_name": "Smith County", + "code": "US.TX.423" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": 
false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "https://www.utsystem.edu/institutions/university-texas-health-science-center-tyler" + ], + "aliases": [ + "East Texas Tuberculosis Sanitarium", + "UT Health Northeast" + ], + "acronyms": [ + "UTHSCT" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Health_Science_Center_at_Tyler", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9704 5790" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "3446655" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q7896437" + ] + }, + "GRID": { + "preferred": "grid.267310.1", + "all": "grid.267310.1" + } + } + }, + { + "id": "https://ror.org/05byvp690", + "name": "The University of Texas Southwestern Medical Center", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1943, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "Children's Medical Center", + "type": "Related", + "id": "https://ror.org/02ndk3y82" + }, + { + "label": "Parkland Memorial Hospital", + "type": "Related", + "id": "https://ror.org/0208r0146" + }, + { + "label": "VA North Texas Health Care System", + "type": "Related", + "id": "https://ror.org/01nzxq896" + }, + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + }, + { + "label": "Institute for Exercise and Environmental Medicine", + "type": "Child", + "id": "https://ror.org/03gqc7y13" + }, + { + "label": "Texas Health Dallas", + "type": "Child", + "id": "https://ror.org/05k07p323" + } + ], + "addresses": [ + { + "lat": 32.812185, + "lng": -96.840174, + "state": "Texas", + "state_code": "US-TX", + "city": "Dallas", + "geonames_city": { + "id": 4684888, + "city": "Dallas", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + 
"code": "US.TX" + }, + "geonames_admin2": { + "name": "Dallas County", + "id": 4684904, + "ascii_name": "Dallas County", + "code": "US.TX.113" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.utsouthwestern.edu/" + ], + "aliases": [ + "UT Southwestern" + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Southwestern_Medical_Center", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9482 7121" + ] + }, + "FundRef": { + "preferred": "100007914", + "all": [ + "100007914", + "100010487", + "100008260" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "617906" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2725999" + ] + }, + "GRID": { + "preferred": "grid.267313.2", + "all": "grid.267313.2" + } + } + }, + { + "id": "https://ror.org/019kgqr73", + "name": "The University of Texas at Arlington", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1895, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "VA North Texas Health Care System", + "type": "Related", + "id": "https://ror.org/01nzxq896" + }, + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 32.731, + "lng": -97.115, + "state": "Texas", + "state_code": "US-TX", + "city": "Arlington", + "geonames_city": { + "id": 4671240, + "city": "Arlington", + "geonames_admin1": { + "name": "Texas", + 
"id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Tarrant County", + "id": 4735638, + "ascii_name": "Tarrant County", + "code": "US.TX.439" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uta.edu/uta/" + ], + "aliases": [ + "UT Arlington" + ], + "acronyms": [ + "UTA" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Texas_at_Arlington", + "labels": [ + { + "label": "Université du Texas à Arlington", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2181 9515" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100009497" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "906409" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q1230739" + ] + }, + "GRID": { + "preferred": "grid.267315.4", + "all": "grid.267315.4" + } + } + }, + { + "id": "https://ror.org/051smbs96", + "name": "The University of Texas of the Permian Basin", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1973, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 31.889444, + "lng": -102.329531, + "state": "Texas", + "state_code": "US-TX", + "city": "Odessa", + "geonames_city": { + "id": 5527554, + "city": "Odessa", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": 
"US.TX" + }, + "geonames_admin2": { + "name": "Ector County", + "id": 5520910, + "ascii_name": "Ector County", + "code": "US.TX.135" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.utpb.edu/" + ], + "aliases": [ + "UT Permian Basin" + ], + "acronyms": [ + "UTPB" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Texas_of_the_Permian_Basin", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9140 1491" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1419441" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2495935" + ] + }, + "GRID": { + "preferred": "grid.267328.a", + "all": "grid.267328.a" + } + } + }, + { + "id": "https://ror.org/044vy1d05", + "name": "Tokushima University", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1949, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "Tokushima University Hospital", + "type": "Related", + "id": "https://ror.org/021ph5e41" + } + ], + "addresses": [ + { + "lat": 34.07, + "lng": 134.56, + "state": null, + "state_code": null, + "city": "Tokushima", + "geonames_city": { + "id": 1850158, + "city": "Tokushima", + "geonames_admin1": { + "name": "Tokushima", + "id": 1850157, + "ascii_name": "Tokushima", + "code": "JP.39" + }, + "geonames_admin2": { + "name": "Tokushima Shi", + "id": 1850156, + "ascii_name": "Tokushima Shi", + "code": "JP.39.1850156" + }, + "license": { + "attribution": "Data from geonames.org under a 
CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 1861060 + } + ], + "links": [ + "https://www.tokushima-u.ac.jp/" + ], + "aliases": [ + "Tokushima Daigaku", + "University of Tokushima" + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Tokushima", + "labels": [ + { + "label": "徳島大学", + "iso639": "ja" + } + ], + "country": { + "country_name": "Japan", + "country_code": "JP" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 1092 3579" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "501100005623" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "15696836" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q1150231" + ] + }, + "GRID": { + "preferred": "grid.267335.6", + "all": "grid.267335.6" + } + } + }, + { + "id": "https://ror.org/03np13864", + "name": "University of Trinidad and Tobago", + "email_address": null, + "ip_addresses": [ + + ], + "established": 2004, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 10.616667, + "lng": -61.216667, + "state": null, + "state_code": null, + "city": "Arima", + "geonames_city": { + "id": 3575051, + "city": "Arima", + "geonames_admin1": { + "name": "Borough of Arima", + "id": 3575052, + "ascii_name": "Borough of Arima", + "code": "TT.01" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + 
}, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 3573591 + } + ], + "links": [ + "https://utt.edu.tt/" + ], + "aliases": [ + + ], + "acronyms": [ + "UTT" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Trinidad_and_Tobago", + "labels": [ + { + "label": "Universidad de Trinidad y Tobago", + "iso639": "es" + } + ], + "country": { + "country_name": "Trinidad and Tobago", + "country_code": "TT" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9490 0886" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "8706288" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q648244" + ] + }, + "GRID": { + "preferred": "grid.267355.0", + "all": "grid.267355.0" + } + } + }, + { + "id": "https://ror.org/04wn28048", + "name": "University of Tulsa", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1894, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 36.152222, + "lng": -95.946389, + "state": "Oklahoma", + "state_code": "US-OK", + "city": "Tulsa", + "geonames_city": { + "id": 4553433, + "city": "Tulsa", + "geonames_admin1": { + "name": "Oklahoma", + "id": 4544379, + "ascii_name": "Oklahoma", + "code": "US.OK" + }, + "geonames_admin2": { + "name": "Tulsa County", + "id": 4553440, + "ascii_name": "Tulsa County", + "code": "US.OK.143" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://utulsa.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "TU" + ], + "status": 
"active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Tulsa", + "labels": [ + { + "label": "Université de tulsa", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2160 264X" + ] + }, + "FundRef": { + "preferred": "100007147", + "all": [ + "100007147", + "100006455" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "32043" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q1848657" + ] + }, + "GRID": { + "preferred": "grid.267360.6", + "all": "grid.267360.6" + } + } + }, + { + "id": "https://ror.org/04scfb908", + "name": "Alfred Health", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1871, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "Caulfield Hospital", + "type": "Child", + "id": "https://ror.org/01fcxf261" + }, + { + "label": "Melbourne Sexual Health Centre", + "type": "Child", + "id": "https://ror.org/013fdz725" + }, + { + "label": "National Trauma Research Institute", + "type": "Child", + "id": "https://ror.org/048t93218" + }, + { + "label": "The Alfred Hospital", + "type": "Child", + "id": "https://ror.org/01wddqe20" + } + ], + "addresses": [ + { + "lat": -37.845542, + "lng": 144.981632, + "state": "Victoria", + "state_code": "AU-VIC", + "city": "Melbourne", + "geonames_city": { + "id": 2158177, + "city": "Melbourne", + "geonames_admin1": { + "name": "Victoria", + "id": 2145234, + "ascii_name": "Victoria", + "code": "AU.07" + }, + "geonames_admin2": { + "name": "Melbourne", + "id": 7839805, + "ascii_name": "Melbourne", + "code": "AU.07.24600" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null 
+ } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 2077456 + } + ], + "links": [ + "http://www.alfred.org.au/" + ], + "aliases": [ + + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "", + "labels": [ + + ], + "country": { + "country_name": "Australia", + "country_code": "AU" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 0432 5259" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "501100002716" + ] + }, + "GRID": { + "preferred": "grid.267362.4", + "all": "grid.267362.4" + } + } + }, + { + "id": "https://ror.org/02c2f8975", + "name": "University of Ulsan", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1970, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "Ulsan University Hospital", + "type": "Related", + "id": "https://ror.org/03sab2a45" + } + ], + "addresses": [ + { + "lat": 35.542772, + "lng": 129.256725, + "state": null, + "state_code": null, + "city": "Ulsan", + "geonames_city": { + "id": 1833747, + "city": "Ulsan", + "geonames_admin1": { + "name": "Ulsan", + "id": 1833742, + "ascii_name": "Ulsan", + "code": "KR.21" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 1835841 + } + ], + "links": [ + "http://en.ulsan.ac.kr/contents/main/" + ], + "aliases": [ + + ], + "acronyms": [ + "UOU" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Ulsan", + "labels": [ + { + "label": "울산대학교", + "iso639": "ko" + } + ], + "country": { + "country_name": 
"South Korea", + "country_code": "KR" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 0533 4667" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "501100002568" + ] + }, + "OrgRef": { + "preferred": "10458246", + "all": [ + "10458246", + "15162872" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q491717" + ] + }, + "GRID": { + "preferred": "grid.267370.7", + "all": "grid.267370.7" + } + } + }, + { + "id": "https://ror.org/010acrp16", + "name": "University of West Alabama", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1835, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 32.59, + "lng": -88.186, + "state": "Alabama", + "state_code": "US-AL", + "city": "Livingston", + "geonames_city": { + "id": 4073383, + "city": "Livingston", + "geonames_admin1": { + "name": "Alabama", + "id": 4829764, + "ascii_name": "Alabama", + "code": "US.AL" + }, + "geonames_admin2": { + "name": "Sumter County", + "id": 4092386, + "ascii_name": "Sumter County", + "code": "US.AL.119" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwa.edu/" + ], + "aliases": [ + "Livingston Female Academy" + ], + "acronyms": [ + "UWA" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_West_Alabama", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9963 9197" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "2425212" + ] + }, + 
"Wikidata": { + "preferred": null, + "all": [ + "Q637346" + ] + }, + "GRID": { + "preferred": "grid.267434.0", + "all": "grid.267434.0" + } + } + }, + { + "id": "https://ror.org/002w4zy91", + "name": "University of West Florida", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1963, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "State University System of Florida", + "type": "Parent", + "id": "https://ror.org/05sqd3t97" + } + ], + "addresses": [ + { + "lat": 30.549493, + "lng": -87.21812, + "state": "Florida", + "state_code": "US-FL", + "city": "Pensacola", + "geonames_city": { + "id": 4168228, + "city": "Pensacola", + "geonames_admin1": { + "name": "Florida", + "id": 4155751, + "ascii_name": "Florida", + "code": "US.FL" + }, + "geonames_admin2": { + "name": "Escambia County", + "id": 4154550, + "ascii_name": "Escambia County", + "code": "US.FL.033" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://uwf.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWF" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_West_Florida", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2112 2427" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100009842" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "750756" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q659255" + ] + }, + "GRID": { + "preferred": "grid.267436.2", + "all": "grid.267436.2" + } + } + }, + 
{ + "id": "https://ror.org/01cqxk816", + "name": "University of West Georgia", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1906, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University System of Georgia", + "type": "Parent", + "id": "https://ror.org/017wcm924" + } + ], + "addresses": [ + { + "lat": 33.573357, + "lng": -85.099593, + "state": "Georgia", + "state_code": "US-GA", + "city": "Carrollton", + "geonames_city": { + "id": 4186416, + "city": "Carrollton", + "geonames_admin1": { + "name": "Georgia", + "id": 4197000, + "ascii_name": "Georgia", + "code": "US.GA" + }, + "geonames_admin2": { + "name": "Carroll County", + "id": 4186396, + "ascii_name": "Carroll County", + "code": "US.GA.045" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.westga.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWG" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_West_Georgia", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2223 6696" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100007922" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "595315" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2495945" + ] + }, + "GRID": { + "preferred": "grid.267437.3", + "all": "grid.267437.3" + } + } + }, + { + "id": "https://ror.org/03c8vvr84", + "name": "University of Western States", + "email_address": null, + "ip_addresses": [ + + ], + 
"established": 1904, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 45.543351, + "lng": -122.523973, + "state": "Oregon", + "state_code": "US-OR", + "city": "Portland", + "geonames_city": { + "id": 5746545, + "city": "Portland", + "geonames_admin1": { + "name": "Oregon", + "id": 5744337, + "ascii_name": "Oregon", + "code": "US.OR" + }, + "geonames_admin2": { + "name": "Multnomah County", + "id": 5742126, + "ascii_name": "Multnomah County", + "code": "US.OR.051" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uws.edu/" + ], + "aliases": [ + "Western States Chiropractic College" + ], + "acronyms": [ + "UWS" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Western_States", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 0455 9493" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1655050" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q7896612" + ] + }, + "GRID": { + "preferred": "grid.267451.3", + "all": "grid.267451.3" + } + } + }, + { + "id": "https://ror.org/03fmjzx88", + "name": "University of Winchester", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1840, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 51.060338, + "lng": -1.325418, + "state": null, + "state_code": null, + "city": "Winchester", + "geonames_city": { + "id": 2633858, + "city": "Winchester", + "geonames_admin1": { 
+ "name": "England", + "id": 6269131, + "ascii_name": "England", + "code": "GB.ENG" + }, + "geonames_admin2": { + "name": "Hampshire", + "id": 2647554, + "ascii_name": "Hampshire", + "code": "GB.ENG.F2" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": "SOUTH EAST (ENGLAND)", + "code": "UKJ" + }, + "nuts_level2": { + "name": "Hampshire and Isle of Wight", + "code": "UKJ3" + }, + "nuts_level3": { + "name": "Central Hampshire", + "code": "UKJ36" + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 2635167 + } + ], + "links": [ + "http://www.winchester.ac.uk/pages/home.aspx" + ], + "aliases": [ + + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Winchester", + "labels": [ + + ], + "country": { + "country_name": "United Kingdom", + "country_code": "GB" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9422 2878" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100010057" + ] + }, + "HESA": { + "preferred": null, + "all": [ + "0021" + ] + }, + "UCAS": { + "preferred": null, + "all": [ + "W76" + ] + }, + "UKPRN": { + "preferred": null, + "all": [ + "10003614" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "3140939" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q3551690" + ] + }, + "GRID": { + "preferred": "grid.267454.6", + "all": "grid.267454.6" + } + } + }, + { + "id": "https://ror.org/01gw3d370", + "name": "University of Windsor", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1857, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 42.305196, + "lng": -83.067483, + "state": "Ontario", + "state_code": "CA-ON", + "city": "Windsor", + "geonames_city": { + "id": 6182962, + "city": "Windsor", + "geonames_admin1": { + 
"name": "Ontario", + "id": 6093943, + "ascii_name": "Ontario", + "code": "CA.08" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6251999 + } + ], + "links": [ + "http://www.uwindsor.ca/" + ], + "aliases": [ + "UWindsor", + "Assumption University of Windsor" + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Windsor", + "labels": [ + { + "label": "Université de windsor", + "iso639": "fr" + } + ], + "country": { + "country_name": "Canada", + "country_code": "CA" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 1936 9596" + ] + }, + "FundRef": { + "preferred": "100009154", + "all": [ + "100009154", + "501100000083" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "342733" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2065769" + ] + }, + "GRID": { + "preferred": "grid.267455.7", + "all": "grid.267455.7" + } + } + }, + { + "id": "https://ror.org/02gdzyx04", + "name": "University of Winnipeg", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1871, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "Winnipeg Institute for Theoretical Physics", + "type": "Child", + "id": "https://ror.org/010tw2j24" + } + ], + "addresses": [ + { + "lat": 49.890122, + "lng": -97.153367, + "state": "Manitoba", + "state_code": "CA-MB", + "city": "Winnipeg", + "geonames_city": { + "id": 6183235, + "city": "Winnipeg", + "geonames_admin1": { + "name": "Manitoba", + "id": 6065171, + "ascii_name": 
"Manitoba", + "code": "CA.03" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6251999 + } + ], + "links": [ + "http://www.uwinnipeg.ca/" + ], + "aliases": [ + + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Winnipeg", + "labels": [ + { + "label": "Université de winnipeg", + "iso639": "fr" + } + ], + "country": { + "country_name": "Canada", + "country_code": "CA" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 1703 4731" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100009367" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "587404" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q472167" + ] + }, + "GRID": { + "preferred": "grid.267457.5", + "all": "grid.267457.5" + } + } + }, + { + "id": "https://ror.org/03mnm0t94", + "name": "University of Wisconsin–Eau Claire", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1916, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University of Wisconsin System", + "type": "Parent", + "id": "https://ror.org/03ydkyb10" + } + ], + "addresses": [ + { + "lat": 44.79895, + "lng": -91.499346, + "state": "Wisconsin", + "state_code": "US-WI", + "city": "Eau Claire", + "geonames_city": { + "id": 5251436, + "city": "Eau Claire", + "geonames_admin1": { + "name": "Wisconsin", + "id": 5279468, + "ascii_name": "Wisconsin", + "code": "US.WI" + }, + "geonames_admin2": { + "name": "Eau Claire County", + "id": 5251439, + "ascii_name": 
"Eau Claire County", + "code": "US.WI.035" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwec.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWEC" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Wisconsin%E2%80%93Eau_Claire", + "labels": [ + { + "label": "Université du Wisconsin à Eau Claire", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2227 2494" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100010315" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "496729" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q3551771" + ] + }, + "GRID": { + "preferred": "grid.267460.1", + "all": "grid.267460.1" + } + } + }, + { + "id": "https://ror.org/05hbexn54", + "name": "University of Wisconsin–Green Bay", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1965, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University of Wisconsin System", + "type": "Parent", + "id": "https://ror.org/03ydkyb10" + } + ], + "addresses": [ + { + "lat": 44.533203, + "lng": -87.921521, + "state": "Wisconsin", + "state_code": "US-WI", + "city": "Green Bay", + "geonames_city": { + "id": 5254962, + "city": "Green Bay", + "geonames_admin1": { + "name": "Wisconsin", + "id": 5279468, + "ascii_name": "Wisconsin", + "code": "US.WI" + }, + "geonames_admin2": { + "name": "Brown County", + "id": 5246898, + "ascii_name": "Brown County", + "code": "US.WI.009" + }, + 
"license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwgb.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWGB" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Wisconsin%E2%80%93Green_Bay", + "labels": [ + { + "label": "Université du Wisconsin–Green Bay", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 0559 7692" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1513886" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2378091" + ] + }, + "GRID": { + "preferred": "grid.267461.0", + "all": "grid.267461.0" + } + } + }, + { + "id": "https://ror.org/00x8ccz20", + "name": "University of Wisconsin–La Crosse", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1909, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University of Wisconsin System", + "type": "Parent", + "id": "https://ror.org/03ydkyb10" + } + ], + "addresses": [ + { + "lat": 43.815576, + "lng": -91.233517, + "state": "Wisconsin", + "state_code": "US-WI", + "city": "La Crosse", + "geonames_city": { + "id": 5258957, + "city": "La Crosse", + "geonames_admin1": { + "name": "Wisconsin", + "id": 5279468, + "ascii_name": "Wisconsin", + "code": "US.WI" + }, + "geonames_admin2": { + "name": "La Crosse County", + "id": 5258961, + "ascii_name": "La Crosse County", + "code": "US.WI.063" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": 
"http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwlax.edu/Home/Future-Students/" + ], + "aliases": [ + + ], + "acronyms": [ + "UW–L" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Wisconsin%E2%80%93La_Crosse", + "labels": [ + { + "label": "Université du Wisconsin–La Crosse", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2169 5137" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "2422287" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2688358" + ] + }, + "GRID": { + "preferred": "grid.267462.3", + "all": "grid.267462.3" + } + } + } + ], + "meta": { + "types": [ + { + "id": "company", + "title": "Company", + "count": 29790 + }, + { + "id": "education", + "title": "Education", + "count": 20325 + }, + { + "id": "nonprofit", + "title": "Nonprofit", + "count": 14187 + }, + { + "id": "healthcare", + "title": "Healthcare", + "count": 13107 + }, + { + "id": "facility", + "title": "Facility", + "count": 10080 + }, + { + "id": "other", + "title": "Other", + "count": 8369 + }, + { + "id": "government", + "title": "Government", + "count": 6511 + }, + { + "id": "archive", + "title": "Archive", + "count": 2967 + } + ], + "countries": [ + { + "id": "us", + "title": "United States", + "count": 31196 + }, + { + "id": "gb", + "title": "United Kingdom", + "count": 7410 + }, + { + "id": "de", + "title": "Germany", + "count": 5189 + }, + { + "id": "cn", + "title": "China", + "count": 4846 + }, + { + "id": "fr", + "title": "France", + "count": 4344 + }, + { + "id": "jp", + "title": "Japan", + 
"count": 3940 + }, + { + "id": "ca", + "title": "Canada", + "count": 3392 + }, + { + "id": "in", + "title": "India", + "count": 3075 + }, + { + "id": "cz", + "title": "Czech Republic", + "count": 2780 + }, + { + "id": "ru", + "title": "Russia", + "count": 2109 + } + ], + "statuses": [ + { + "id": "active", + "title": "active", + "count": 105336 + } + ] + } +} \ No newline at end of file diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 8cb88f45781d..8b67a90a8ea1 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. @@ -154,5 +154,17 @@ ${spring-boot.version} + + com.google.guava + guava + + + + org.checkerframework + checker-qual + + + + diff --git a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java index afd1627f5ee3..6cffa7ee66d5 100644 --- a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java +++ b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java @@ -7,6 +7,8 @@ */ package org.dspace.servicemanager; +import static org.apache.logging.log4j.Level.DEBUG; + import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; @@ -21,6 +23,8 @@ import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.kernel.Activator; import org.dspace.kernel.config.SpringLoader; import org.dspace.kernel.mixins.ConfigChangeListener; @@ -28,8 +32,7 @@ import org.dspace.kernel.mixins.ServiceManagerReadyAware; import org.dspace.servicemanager.config.DSpaceConfigurationService; import org.dspace.servicemanager.spring.DSpaceBeanFactoryPostProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import 
org.dspace.utils.CallStackUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.NoSuchBeanDefinitionException; @@ -44,7 +47,7 @@ */ public final class DSpaceServiceManager implements ServiceManagerSystem { - private static Logger log = LoggerFactory.getLogger(DSpaceServiceManager.class); + private static Logger log = LogManager.getLogger(); public static final String CONFIG_PATH = "spring/spring-dspace-applicationContext.xml"; public static final String CORE_RESOURCE_PATH = "classpath*:spring/spring-dspace-core-services.xml"; @@ -426,9 +429,10 @@ public T getServiceByName(String name, Class type) { service = (T) applicationContext.getBean(name, type); } catch (BeansException e) { // no luck, try the fall back option - log.warn( + log.debug( "Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", name, e); + + " Will try to look up bean by type next.", name); + CallStackUtils.logCaller(log, DEBUG); service = null; } } else { @@ -437,8 +441,9 @@ public T getServiceByName(String name, Class type) { service = (T) applicationContext.getBean(type.getName(), type); } catch (BeansException e) { // no luck, try the fall back option - log.warn("Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", type.getName(), e); + log.debug("Unable to locate bean by name or id={}." 
+ + " Will try to look up bean by type next.", type::getName); + CallStackUtils.logCaller(log, DEBUG); service = null; } } diff --git a/dspace-services/src/main/java/org/dspace/services/EventService.java b/dspace-services/src/main/java/org/dspace/services/EventService.java index 92080f0358b0..6cbc195656f0 100644 --- a/dspace-services/src/main/java/org/dspace/services/EventService.java +++ b/dspace-services/src/main/java/org/dspace/services/EventService.java @@ -7,6 +7,8 @@ */ package org.dspace.services; +import java.util.function.Supplier; + import org.dspace.services.model.Event; import org.dspace.services.model.EventListener; @@ -34,4 +36,11 @@ public interface EventService { */ public void registerEventListener(EventListener listener); + /** + * Fires an event asynchronously by retrieving it from the given supplier + * + * @param eventSupplier + */ + void fireAsyncEvent(Supplier eventSupplier); + } diff --git a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java index 1787c688f6a1..322b0dc8be20 100644 --- a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java +++ b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java @@ -10,11 +10,17 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.Supplier; import javax.annotation.PreDestroy; +import com.google.common.util.concurrent.MoreExecutors; import org.apache.commons.lang3.ArrayUtils; +import org.dspace.services.ConfigurationService; import org.dspace.services.EventService; import org.dspace.services.RequestService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.model.Event; import org.dspace.services.model.Event.Scope; import 
org.dspace.services.model.EventListener; @@ -32,6 +38,8 @@ */ public final class SystemEventService implements EventService { + private static final int DEFAULT_THREAD_SIZE = 2; + private final Logger log = LoggerFactory.getLogger(SystemEventService.class); /** @@ -42,6 +50,8 @@ public final class SystemEventService implements EventService { private final RequestService requestService; private EventRequestInterceptor requestInterceptor; + private ExecutorService executorService; + @Autowired(required = true) public SystemEventService(RequestService requestService) { if (requestService == null) { @@ -58,6 +68,9 @@ public SystemEventService(RequestService requestService) { public void shutdown() { this.requestInterceptor = null; // clear the interceptor this.listenersMap.clear(); + if (this.executorService != null && !this.executorService.isShutdown()) { + this.executorService.shutdown(); + } } @@ -83,6 +96,25 @@ public void fireEvent(Event event) { } } + @Override + public void fireAsyncEvent(Supplier eventSupplier) { + initExecutor(); + this.executorService.submit(() -> this.fireEvent(eventSupplier.get())); + } + + private void initExecutor() { + if (this.executorService != null) { + return; + } + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + int threadSize = configurationService.getIntProperty("system-event.thread.size", DEFAULT_THREAD_SIZE); + if (threadSize == 0) { + this.executorService = MoreExecutors.newDirectExecutorService(); + } else { + this.executorService = Executors.newFixedThreadPool(threadSize); + } + } + /* (non-Javadoc) * @see org.dspace.services.EventService#registerEventListener(org.dspace.services.model.EventListener) */ diff --git a/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java new file mode 100644 index 000000000000..cb60a223a184 --- /dev/null +++ 
b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.utils; + +import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; + +import java.lang.StackWalker.StackFrame; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; + +/** + * Utility methods for manipulating call stacks. + * + * @author mwood + */ +public class CallStackUtils { + private CallStackUtils() {} + + /** + * Log the class, method and line of the caller's caller. + * + * @param log logger to use. + * @param level log at this level, if enabled. + */ + static public void logCaller(Logger log, Level level) { + if (log.isEnabled(level)) { + StackWalker stack = StackWalker.getInstance(RETAIN_CLASS_REFERENCE); + StackFrame caller = stack.walk(stream -> stream.skip(2) + .findFirst() + .get()); + String callerClassName = caller.getDeclaringClass().getCanonicalName(); + String callerMethodName = caller.getMethodName(); + int callerLine = caller.getLineNumber(); + log.log(level, "Called from {}.{} line {}.", + callerClassName, callerMethodName, callerLine); + } + } +} diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 16f8a396fbee..803eba859119 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. @@ -24,25 +24,6 @@ ${basedir}/.. - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - - diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index b35fb7388a15..1c8103016e5d 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. @@ -22,38 +22,6 @@ ${basedir}/.. 
- - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - postgres-support - - - !db.name - - - - - org.postgresql - postgresql - - - - - javax.servlet diff --git a/dspace/config/crosswalks/mapConverter-datacitePublicationLicense.properties b/dspace/config/crosswalks/mapConverter-datacitePublicationLicense.properties new file mode 100644 index 000000000000..05bcf86c28da --- /dev/null +++ b/dspace/config/crosswalks/mapConverter-datacitePublicationLicense.properties @@ -0,0 +1,8 @@ +CC\ BY = https://creativecommons.org/licenses/by/4.0/legalcode +CC\ BY\-SA = https://creativecommons.org/licenses/by-sa/4.0/legalcode +CC\ BY\-ND = https://creativecommons.org/licenses/by-nd/4.0/legalcode +CC\ BY\-NC = https://creativecommons.org/licenses/by-nc/4.0/legalcode +CC\ BY\-NC\-SA = https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode +CC\ BY\-NC\-ND = https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode +CC0 = https://creativecommons.org/share-your-work/public-domain/cc0/ +PDM = https://creativecommons.org/publicdomain/mark/1.0/ \ No newline at end of file diff --git a/dspace/config/crosswalks/mapConverter-datacitePublicationRights.properties b/dspace/config/crosswalks/mapConverter-datacitePublicationRights.properties new file mode 100644 index 000000000000..049024f7dc56 --- /dev/null +++ b/dspace/config/crosswalks/mapConverter-datacitePublicationRights.properties @@ -0,0 +1,4 @@ +openaccess = http://purl.org/coar/access_right/c_abf2 +embargoed = http://purl.org/coar/access_right/c_f1cf +restricted = http://purl.org/coar/access_right/c_16ec +metadata\ only = http://purl.org/coar/access_right/c_14cb \ No newline at end of file diff --git a/dspace/config/crosswalks/mapConverter-datacitePublicationTypes.properties b/dspace/config/crosswalks/mapConverter-datacitePublicationTypes.properties new file mode 100644 index 000000000000..0ef150d6f8f8 --- /dev/null +++ b/dspace/config/crosswalks/mapConverter-datacitePublicationTypes.properties @@ -0,0 
+1,50 @@ +Resource\ Types\:\:text = Text +Resource\ Types\:\:text\:\:annotation = Text +Resource\ Types\:\:text\:\:bibliography = Text +Resource\ Types\:\:text\:\:blog\ post = Text +Resource\ Types\:\:text\:\:book = Book +Resource\ Types\:\:text\:\:book\:\:book\ part = BookChapter +Resource\ Types\:\:text\:\:conference\ output = ConferencePaper +Resource\ Types\:\:text\:\:conference\ output\:\:conference paper not in proceedings = ConferencePaper +Resource\ Types\:\:text\:\:conference\ output\:\:conference poster not in proceedings = ConferencePaper +Resource\ Types\:\:text\:\:conference\ output\:\:conference presentation = ConferenceProceeding +Resource\ Types\:\:text\:\:conference\ output\:\:conference proceedings = ConferenceProceeding +Resource\ Types\:\:text\:\:journal = Journal +Resource\ Types\:\:text\:\:journal\:\:editorial = Journal +Resource\ Types\:\:text\:\:journal\:\:journal\ article = JournalArticle +Resource\ Types\:\:text\:\:journal\:\:journal\ article\:\:corrigendum = JournalArticle +Resource\ Types\:\:text\:\:journal\:\:journal\ article\:\:data\ paper = JournalArticle +Resource\ Types\:\:text\:\:journal\:\:journal\ article\:\:research\ article = JournalArticle +Resource\ Types\:\:text\:\:journal\:\:journal\ article\:\:review\ article = JournalArticle +Resource\ Types\:\:text\:\:journal\:\:journal\ article\:\:software\ paper = JournalArticle +Resource\ Types\:\:text\:\:journal\:\:letter\ to\ the\ editor = Journal +Resource\ Types\:\:text\:\:lecture = Text +Resource\ Types\:\:text\:\:letter = Text +Resource\ Types\:\:text\:\:magazine = Text +Resource\ Types\:\:text\:\:manuscript = Text +Resource\ Types\:\:text\:\:musical\ notation = Sound +Resource\ Types\:\:text\:\:newspaper = Text +Resource\ Types\:\:text\:\:newspaper\:\:newspaper\ article = Text +Resource\ Types\:\:text\:\:other\ periodical = Text +Resource\ Types\:\:text\:\:preprint = Preprint +Resource\ Types\:\:text\:\:report = Report +Resource\ Types\:\:text\:\:report\:\:clinical\ study = 
Report +Resource\ Types\:\:text\:\:report\:\:data\ management\ plan = OutputManagementPlan +Resource\ Types\:\:text\:\:report\:\:memorandum = Report +Resource\ Types\:\:text\:\:report\:\:policy\ report = Report +Resource\ Types\:\:text\:\:report\:\:project\ deliverable = Report +Resource\ Types\:\:text\:\:report\:\:research\ protocol = Report +Resource\ Types\:\:text\:\:report\:\:research\ report = Report +Resource\ Types\:\:text\:\:report\:\:technical\ report = Report +Resource\ Types\:\:text\:\:research\ proposal = Text +Resource\ Types\:\:text\:\:review = PeerReview +Resource\ Types\:\:text\:\:review\:\:book\ review = PeerReview +Resource\ Types\:\:text\:\:review\:\:commentary = PeerReview +Resource\ Types\:\:text\:\:review\:\:peer\ review = PeerReview +Resource\ Types\:\:text\:\:technical\ documentation = Text +Resource\ Types\:\:text\:\:thesis = Dissertation +Resource\ Types\:\:text\:\:thesis\:\:bachelor\ thesis = Dissertation +Resource\ Types\:\:text\:\:thesis\:\:doctoral\ thesis = Dissertation +Resource\ Types\:\:text\:\:thesis\:\:master\ thesis = Dissertation +Resource\ Types\:\:text\:\:transcription = Text +Resource\ Types\:\:text\:\:working\ paper = Preprint diff --git a/dspace/config/crosswalks/mapConverter-scopusToCoarPublicationTypes.properties b/dspace/config/crosswalks/mapConverter-scopusToCoarPublicationTypes.properties index 35e759cd6860..33aa97437dda 100644 --- a/dspace/config/crosswalks/mapConverter-scopusToCoarPublicationTypes.properties +++ b/dspace/config/crosswalks/mapConverter-scopusToCoarPublicationTypes.properties @@ -1,14 +1,14 @@ ar = Resource Types::text::journal::journal article er = Resource Types::text::journal::journal article::corrigendum re = Resource Types::text::journal::journal article::review article -cp = Resource Types::text::conference outputs::conference proceedings::conference paper +cp = Resource Types::text::conference output::conference proceedings::conference paper bk = Resource Types::text::book ch = Resource 
Types::text::book chapter ed = Resource Types::text::journal::editorial le = Resource Types::text::letter -cr = Conference Review -ab = Abstract Report -bz = Business Article -no = Note -pr = Press Release -sh = Short Survey \ No newline at end of file +cr = Resource Types::text::review +ab = Resource Types::text::report +bz = Resource Types::text::journal::journal article +no = Resource Types::text +pr = Resource Types::text +sh = Resource Types::text \ No newline at end of file diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 7b66eaf04372..df3d94b1258a 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -13,13 +13,13 @@ + version="1.0"> - + + + @@ -93,6 +96,14 @@ + + + + @@ -658,6 +669,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1125,11 +1170,11 @@ - + + select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/> @@ -1207,7 +1252,7 @@ - + + + + + + + + + + + open access + + + embargoed access + + + restricted access + + + metadata only access + + + + + + + doi + + + + + + + + + + doi + + + + + diff --git a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl index a3a4e6667046..5c434e49ed35 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl @@ -115,6 +115,20 @@ + + + + + + + + + + + + + + diff --git a/dspace/config/crosswalks/oai/service-description.xml b/dspace/config/crosswalks/oai/service-description.xml index a1bde8827083..fdbf62612d40 100644 --- a/dspace/config/crosswalks/oai/service-description.xml +++ b/dspace/config/crosswalks/oai/service-description.xml @@ -2,6 +2,6 @@ 
https://www.openaire.eu/cerif-profile/vocab/OpenAIRE_Service_Compatibility#1.1 ${dspace.name} An example CRIS that complies with the OpenAIRE Guidelines for CRIS Managers v.1.1. - ${dspace.url} - http://${dspace.hostname}/oai/openairecris + ${dspace.ui.url} + ${dspace.server.url}/oai/openairecris diff --git a/dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties b/dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties new file mode 100644 index 000000000000..e2fef507b77a --- /dev/null +++ b/dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties @@ -0,0 +1,23 @@ +# Mapping between DSpace common publication's types and a schema.org URI +Animation = https://schema.org/3DModel +Article = https://schema.org/ScholarlyArticle +Book = https://schema.org/Book +Book\ chapter = https://schema.org/Chapter +Dataset = https://schema.org/Dataset +Learning\ Object = https://schema.org/LearningResource +Image = https://schema.org/ImageObject +Image,\ 3-D = https://schema.org/3DModel +Map = https://schema.org/Map +Musical\ Score = https://schema.org/MusicComposition +Plan\ or\ blueprint = https://schema.org/Map +Preprint = https://schema.org/VisualArtwork +Presentation = https://schema.org/PresentationDigitalDocument +Recording,\ acoustical = https://schema.org/MusicRecording +Recording,\ musical = https://schema.org/MusicRecording +Recording,\ oral = https://schema.org/MusicRecording +Software = https://schema.org/SoftwareApplication +Technical\ Report = https://schema.org/Report +Thesis = https://schema.org/Thesis +Video = https://schema.org/VideoObject +Working\ Paper = https://schema.org/TechArticle +Other = https://schema.org/CreativeWork \ No newline at end of file diff --git a/dspace/config/crosswalks/template/orgUnit-cerif-xml.template b/dspace/config/crosswalks/template/orgUnit-cerif-xml.template index 484d2daaa6c4..69d94a27a320 100644 --- 
a/dspace/config/crosswalks/template/orgUnit-cerif-xml.template +++ b/dspace/config/crosswalks/template/orgUnit-cerif-xml.template @@ -5,6 +5,7 @@ @organization.legalName@ @organization.identifier@ @oairecerif.identifier.url@ + @organization.identifier.ror@ @relation.organization-parentOrganization.start@ diff --git a/dspace/config/crosswalks/template/patent-datacite-xml.template b/dspace/config/crosswalks/template/patent-datacite-xml.template new file mode 100644 index 000000000000..a08d36f7a4fb --- /dev/null +++ b/dspace/config/crosswalks/template/patent-datacite-xml.template @@ -0,0 +1,53 @@ + + + @virtual.primary-doi.dc-identifier-doi@ + + @group.dc-contributor-author.start@ + + @dc.contributor.author@ + @relation.dc-contributor-author.start@ + @person.identifier.orcid@ + @relation.dc-contributor-author.end@ + @relation.oairecerif-author-affiliation.start@ + @dc.title@ + @relation.oairecerif-author-affiliation.end@ + @if.not.authority.oairecerif-author-affiliation.start@ + @oairecerif.author.affiliation@ + @if.not.authority.oairecerif-author-affiliation.end@ + + @group.dc-contributor-author.end@ + + + @dc.title@ + + @dc.publisher@ + @virtual.date.dc-date-issued.YYYY@ + + @dc.subject@ + + + @dc.date.issued@ + @datacite.available@ + + @dc.language.iso@ + + + @dc.identifier.uri@ + @virtual.alternative-doi.dc-identifier-doi@ + + @dc.description.version@ + + + @oaire.licenseCondition@ + + @datacite.rights@ + + + @dc.description.abstract@ + @dc.description@ + + \ No newline at end of file diff --git a/dspace/config/crosswalks/template/product-datacite-xml.template b/dspace/config/crosswalks/template/product-datacite-xml.template new file mode 100644 index 000000000000..7409088142de --- /dev/null +++ b/dspace/config/crosswalks/template/product-datacite-xml.template @@ -0,0 +1,52 @@ + + + @virtual.primary-doi.dc-identifier-doi@ + + @group.dc-contributor-author.start@ + + @dc.contributor.author@ + @relation.dc-contributor-author.start@ + @person.identifier.orcid@ + 
@relation.dc-contributor-author.end@ + @relation.oairecerif-author-affiliation.start@ + @dc.title@ + @relation.oairecerif-author-affiliation.end@ + @if.not.authority.oairecerif-author-affiliation.start@ + @oairecerif.author.affiliation@ + @if.not.authority.oairecerif-author-affiliation.end@ + + @group.dc-contributor-author.end@ + + + @dc.title@ + + @dc.publisher@ + @virtual.date.dc-date-issued.YYYY@ + + @dc.subject@ + + + @dc.date.issued@ + + @dc.language.iso@ + + + @dc.identifier.uri@ + @virtual.alternative-doi.dc-identifier-doi@ + + @dc.description.version@ + + + @oaire.licenseCondition@ + + @datacite.rights@ + + + @dc.description.abstract@ + @dc.description@ + + \ No newline at end of file diff --git a/dspace/config/crosswalks/template/publication-datacite-xml.template b/dspace/config/crosswalks/template/publication-datacite-xml.template new file mode 100644 index 000000000000..a08d36f7a4fb --- /dev/null +++ b/dspace/config/crosswalks/template/publication-datacite-xml.template @@ -0,0 +1,53 @@ + + + @virtual.primary-doi.dc-identifier-doi@ + + @group.dc-contributor-author.start@ + + @dc.contributor.author@ + @relation.dc-contributor-author.start@ + @person.identifier.orcid@ + @relation.dc-contributor-author.end@ + @relation.oairecerif-author-affiliation.start@ + @dc.title@ + @relation.oairecerif-author-affiliation.end@ + @if.not.authority.oairecerif-author-affiliation.start@ + @oairecerif.author.affiliation@ + @if.not.authority.oairecerif-author-affiliation.end@ + + @group.dc-contributor-author.end@ + + + @dc.title@ + + @dc.publisher@ + @virtual.date.dc-date-issued.YYYY@ + + @dc.subject@ + + + @dc.date.issued@ + @datacite.available@ + + @dc.language.iso@ + + + @dc.identifier.uri@ + @virtual.alternative-doi.dc-identifier-doi@ + + @dc.description.version@ + + + @oaire.licenseCondition@ + + @datacite.rights@ + + + @dc.description.abstract@ + @dc.description@ + + \ No newline at end of file diff --git a/dspace/config/default.license b/dspace/config/default.license 
index 0b5b3cb4b8f1..390e9786688d 100644 --- a/dspace/config/default.license +++ b/dspace/config/default.license @@ -3,34 +3,16 @@ This sample license is provided for informational purposes only. NON-EXCLUSIVE DISTRIBUTION LICENSE -By signing and submitting this license, you (the author(s) or copyright -owner) grants to DSpace University (DSU) the non-exclusive right to reproduce, -translate (as defined below), and/or distribute your submission (including -the abstract) worldwide in print and electronic format and in any medium, -including but not limited to audio or video. - -You agree that DSU may, without changing the content, translate the -submission to any medium or format for the purpose of preservation. - -You also agree that DSU may keep more than one copy of this submission for -purposes of security, back-up and preservation. - -You represent that the submission is your original work, and that you have -the right to grant the rights contained in this license. You also represent -that your submission does not, to the best of your knowledge, infringe upon -anyone's copyright. - -If the submission contains material for which you do not hold copyright, -you represent that you have obtained the unrestricted permission of the -copyright owner to grant DSU the rights required by this license, and that -such third-party owned material is clearly identified and acknowledged -within the text or content of the submission. - -IF THE SUBMISSION IS BASED UPON WORK THAT HAS BEEN SPONSORED OR SUPPORTED -BY AN AGENCY OR ORGANIZATION OTHER THAN DSU, YOU REPRESENT THAT YOU HAVE -FULFILLED ANY RIGHT OF REVIEW OR OTHER OBLIGATIONS REQUIRED BY SUCH -CONTRACT OR AGREEMENT. - -DSU will clearly identify your name(s) as the author(s) or owner(s) of the -submission, and will not make any alteration, other than as allowed by this -license, to your submission. 
+By signing and submitting this license, you (the author(s) or copyright owner) grants to DSpace University (DSU) the non-exclusive right to reproduce, translate (as defined below), and/or distribute your submission (including the abstract) worldwide in print and electronic format and in any medium, including but not limited to audio or video. + +You agree that DSU may, without changing the content, translate the submission to any medium or format for the purpose of preservation. + +You also agree that DSU may keep more than one copy of this submission for purposes of security, back-up and preservation. + +You represent that the submission is your original work, and that you have the right to grant the rights contained in this license. You also represent that your submission does not, to the best of your knowledge, infringe upon anyone's copyright. + +If the submission contains material for which you do not hold copyright, you represent that you have obtained the unrestricted permission of the copyright owner to grant DSU the rights required by this license, and that such third-party owned material is clearly identified and acknowledged within the text or content of the submission. + +IF THE SUBMISSION IS BASED UPON WORK THAT HAS BEEN SPONSORED OR SUPPORTED BY AN AGENCY OR ORGANIZATION OTHER THAN DSU, YOU REPRESENT THAT YOU HAVE FULFILLED ANY RIGHT OF REVIEW OR OTHER OBLIGATIONS REQUIRED BY SUCH CONTRACT OR AGREEMENT. + +DSU will clearly identify your name(s) as the author(s) or owner(s) of the submission, and will not make any alteration, other than as allowed by this license, to your submission. 
diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 2fa3578cd90e..2e14eeec788c 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -38,6 +38,7 @@ dspace.ui.url = http://localhost:4000 # Name of the site dspace.name = DSpace at My University +dspace.shortname = DSpace # Assetstore configurations have moved to config/modules/assetstore.cfg # and config/spring/api/bitstore.xml. @@ -47,6 +48,9 @@ dspace.name = DSpace at My University # Default language for metadata values default.language = en_US +# Url of subscriptions page +subscription.url = ${dspace.ui.url}/subscriptions + # Solr server/webapp. # DSpace uses Solr for all search/browse capability (and for usage statistics). # Since DSpace 7, SOLR must be installed as a stand-alone service. @@ -74,23 +78,15 @@ solr.multicorePrefix = # solr.client.timeToLive = 600 ##### Database settings ##### -# DSpace only supports two database types: PostgreSQL or Oracle -# PostgreSQL is highly recommended. -# Oracle support is DEPRECATED. See https://github.com/DSpace/DSpace/issues/8214 +# DSpace ONLY supports PostgreSQL at this time. 
# URL for connecting to database -# * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace -# JDBC Driver -# * For Postgres: org.postgresql.Driver -# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver +# JDBC Driver for PostgreSQL db.driver = org.postgresql.Driver -# Database Dialect (for Hibernate) -# * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect +# PostgreSQL Database Dialect (for Hibernate) db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -98,9 +94,7 @@ db.username = dspace db.password = dspace # Database Schema name -# * For Postgres, this is often "public" (default schema) -# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, -# so this may be set to ${db.username} in most scenarios. +# For PostgreSQL, this is often "public" (default schema) db.schema = public ## Database Connection pool parameters @@ -281,6 +275,10 @@ identifier.doi.prefix = 10.5072 # it from other services also minting DOIs under your prefix? 
identifier.doi.namespaceseparator = dspace/ +# if you want, you can specify custom metadata field for doi identifier +# if nothing specified, then will be used dc.identifier.doi as default +identifier.doi.metadata = dc.identifier.doi + ##### Edit Item configurations ##### # This configuration allows to set a group that will able to # use edit metadata mode @@ -476,6 +474,12 @@ filter.plugins = Branded Preview JPEG # remove "JPEG Thumbnail" from the plugin list # uncomment and insert the following line into the plugin list # ImageMagick Image Thumbnail, ImageMagick PDF Thumbnail, \ +# [To enable ImageMagick Video Thumbnails (requires both ImageMagick and ffmpeg installed)]: +# uncomment and insert the following line into the plugin list +# ImageMagick Video Thumbnail, \ +# NOTE: pay attention to the ImageMagick policies and reource limits in its policy.xml +# configuration file. The limits may have to be increased if a "cache resources +# exhausted" error is thrown. #Assign 'human-understandable' names to each filter plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.TikaTextExtractionFilter = Text Extractor @@ -484,6 +488,7 @@ plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilte plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.PDFBoxThumbnail = PDFBox JPEG Thumbnail plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.ImageMagickImageThumbnailFilter = ImageMagick Image Thumbnail plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.ImageMagickPdfThumbnailFilter = ImageMagick PDF Thumbnail +plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.ImageMagickVideoThumbnailFilter = ImageMagick Video Thumbnail #Configure each filter's input format(s) # NOTE: The TikaTextExtractionFilter can support any file formats that are supported by Apache Tika. 
So, you can easily @@ -503,10 +508,11 @@ filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = OpenDo filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = OpenDocument Text filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = RTF filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = Text -filter.org.dspace.app.mediafilter.JPEGFilter.inputFormats = BMP, GIF, JPEG, image/png -filter.org.dspace.app.mediafilter.BrandedPreviewJPEGFilter.inputFormats = BMP, GIF, JPEG, image/png -filter.org.dspace.app.mediafilter.ImageMagickImageThumbnailFilter.inputFormats = BMP, GIF, image/png, JPG, TIFF, JPEG, JPEG 2000 +filter.org.dspace.app.mediafilter.JPEGFilter.inputFormats = BMP, GIF, JPEG, PNG +filter.org.dspace.app.mediafilter.BrandedPreviewJPEGFilter.inputFormats = BMP, GIF, JPEG, PNG +filter.org.dspace.app.mediafilter.ImageMagickImageThumbnailFilter.inputFormats = BMP, GIF, PNG, JPG, TIFF, JPEG, JPEG 2000 filter.org.dspace.app.mediafilter.ImageMagickPdfThumbnailFilter.inputFormats = Adobe PDF +filter.org.dspace.app.mediafilter.ImageMagickVideoThumbnailFilter.inputFormats = Video MP4 filter.org.dspace.app.mediafilter.PDFBoxThumbnail.inputFormats = Adobe PDF #Publicly accessible thumbnails of restricted content. 
@@ -621,7 +627,7 @@ crosswalk.dissemination.DataCite.preferList = false crosswalk.dissemination.DataCite.publisher = My University #crosswalk.dissemination.DataCite.dataManager = # defaults to publisher #crosswalk.dissemination.DataCite.hostingInstitution = # defaults to publisher -crosswalk.dissemination.DataCite.namespace = http://datacite.org/schema/kernel-3 +crosswalk.dissemination.DataCite.namespace = http://datacite.org/schema/kernel-4 # Crosswalk Plugin Configuration: # The purpose of Crosswalks is to translate an external metadata format to/from @@ -794,7 +800,9 @@ event.dispatcher.default.class = org.dspace.event.BasicDispatcher # Add doi here if you are using org.dspace.identifier.DOIIdentifierProvider to generate DOIs. # Adding doi here makes DSpace send metadata updates to your doi registration agency. # Add rdf here, if you are using dspace-rdf to export your repository content as RDF. -event.dispatcher.default.consumers = versioning, discovery, eperson, dedup, crisconsumer, orcidqueue, audit, nbeventsdelete, referenceresolver, orcidwebhook, itemenhancer, customurl, reciprocal +# Add iiif here, if you are using dspace-iiif. 
+# Add orcidqueue here, if the integration with ORCID is configured and wish to enable the synchronization queue functionality +event.dispatcher.default.consumers = versioning, discovery, eperson, dedup, crisconsumer, orcidqueue, audit, nbeventsdelete, referenceresolver, orcidwebhook, itemenhancer, customurl, reciprocal, filetypemetadataenhancer # The noindex dispatcher will not create search or browse indexes (useful for batch item imports) @@ -870,10 +878,18 @@ event.consumer.customurl.filters = Item+Install|Modify|Modify_Metadata event.consumer.orcidqueue.class = org.dspace.orcid.consumer.OrcidQueueConsumer event.consumer.orcidqueue.filters = Item+Install|Modify|Modify_Metadata|Delete|Remove +# item submission config reload consumer +event.consumer.submissionconfig.class = org.dspace.submit.consumer.SubmissionConfigConsumer +event.consumer.submissionconfig.filters = Collection+Modify_Metadata + # reciprocal consumer event.consumer.reciprocal.class = org.dspace.content.authority.ReciprocalItemAuthorityConsumer event.consumer.reciprocal.filters = Item+INSTALL|MODIFY_METADATA|MODIFY +# FileType consumer +event.consumer.filetypemetadataenhancer.class = org.dspace.app.filetype.consumer.FileTypeMetadataEnhancerConsumer +event.consumer.filetypemetadataenhancer.filters = Item+Create|Modify_Metadata:Bitstream+Create|Modify_Metadata|Delete + # ...set to true to enable testConsumer messages to standard output #testConsumer.verbose = true @@ -894,7 +910,7 @@ plugin.single.org.dspace.embargo.EmbargoSetter = org.dspace.embargo.DefaultEmbar plugin.single.org.dspace.embargo.EmbargoLifter = org.dspace.embargo.DefaultEmbargoLifter # values for the forever embargo date threshold -# This threshold date is used in the default access status helper to dermine if an item is +# This threshold date is used in the default access status helper to determine if an item is # restricted or embargoed based on the start date of the primary (or first) file policies. 
# In this case, if the policy start date is inferior to the threshold date, the status will # be embargo, else it will be restricted. @@ -931,7 +947,7 @@ org.dspace.app.itemexport.life.span.hours = 48 # The maximum size in Megabytes the export should be. This is enforced before the # compression. Each bitstream's size in each item being exported is added up, if their -# cummulative sizes are more than this entry the export is not kicked off +# cumulative sizes are more than this entry the export is not kicked off org.dspace.app.itemexport.max.size = 200 ### Batch Item import settings ### @@ -944,10 +960,6 @@ org.dspace.app.batchitemimport.work.dir = ${dspace.dir}/imports # default = false, (disabled) #org.dspace.content.Collection.findAuthorizedPerformanceOptimize = true -# For backwards compatibility, the subscription emails by default include any modified items -# uncomment the following entry for only new items to be emailed -# eperson.subscription.onlynew = true - # Identifier providers. 
# Following are configuration values for the EZID DOI provider, with appropriate @@ -999,6 +1011,9 @@ registry.metadata.load = openaire4-types.xml registry.metadata.load = dspace-types.xml registry.metadata.load = iiif-types.xml registry.metadata.load = bitstream-types.xml +registry.metadata.load = dataquality-types.xml + +registry.bitstream-formats.load = bitstream-formats.xml #---------------------------------------------------------------# #-----------------UI-Related CONFIGURATIONS---------------------# @@ -1055,7 +1070,7 @@ submission.default.entitytype = Publication #### Creative Commons settings ###### # The url to the web service API -cc.api.rooturl = http://api.creativecommons.org/rest/1.5 +cc.api.rooturl = https://api.creativecommons.org/rest/1.5 # Metadata field to hold CC license URI of selected license cc.license.uri = dc.rights.uri @@ -1072,12 +1087,12 @@ cc.submit.addbitstream = true # A list of license classes that should be excluded from selection process # class names - comma-separated list - must exactly match what service returns. # At time of implementation, these are: -# publicdomain - "Public Domain" +# publicdomain - "Public Domain" (this is now the same as CC0) # standard - "Creative Commons" # recombo - "Sampling" # zero - "CC0" # mark - "Public Domain Mark" -cc.license.classfilter = recombo, mark +cc.license.classfilter = publicdomain, recombo, mark # Jurisdiction of the creative commons license -- is it ported or not? # Use the key from the url seen in the response from the api call, @@ -1140,6 +1155,10 @@ webui.preview.brand.fontpoint = 12 ##### Settings for item count (strength) information #### +# Whether to display collection and community strengths (i.e. item counts) +# By default, this feature is disabled. +# webui.strengths.show = false + # Counts fetched in real time will perform an actual count of the # index contents every time a page with this feature is requested, # which may not scale as well as a cached count. 
@@ -1226,7 +1245,7 @@ webui.preview.brand.fontpoint = 12 # webui.browse.index.1 = dateissued:itemResOutputs:dateissued webui.browse.index.2 = author:metadata:dc.contributor.*\,dc.creator:text -webui.browse.index.3 = title:itemResOutputs:title +webui.browse.index.3 = title:item:title webui.browse.index.4 = subject:metadata:dc.subject.*:text webui.browse.index.5 = rodept:metadata:cris.virtual.department:text webui.browse.index.6 = type:metadata:dc.type:text @@ -1240,12 +1259,18 @@ webui.browse.index.11 = eqtitle:itemEquipment:title ## example of authority-controlled browse category - see authority control config #webui.browse.index.5 = lcAuthor:metadataAuthority:dc.contributor.author:authority +# By default, browse hierarchical indexes are created based on the used controlled +# vocabularies in the submission forms. These could be disabled adding the name of +# the vocabularies to exclude in this comma-separated property. +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +# webui.browse.vocabularies.disabled = srsc + # Browse extra filters # To configure a filter for a specific browse you can define the following configuration property # browse.solr.bi_.filter = -# When you are limiting a two level browse you need to configure, typically the same filter, also for the second level. +# When you are limiting a two level browse you need to configure, typically the same filter, also for the second level. 
# In such case the browse index is used -# browse.solr.bi__dis.filter = +# browse.solr.bi__dis.filter = browse.solr.bi_2_dis.filter= entityType:Publication OR entityType:Product OR entityType:Patent browse.solr.bi_4_dis.filter= entityType:Publication OR entityType:Product OR entityType:Patent browse.solr.bi_5_dis.filter= entityType:Publication OR entityType:Product OR entityType:Patent @@ -1487,6 +1512,12 @@ websvc.opensearch.formats = html,atom,rss # Use -1 to force all bitstream to be served inline webui.content_disposition_threshold = 8388608 +#### Content Attachment Disposition Formats #### +# +# Set which mimetypes, file extensions will NOT be opened inline +# Files with these mimetypes/extensions will always be downloaded, +# regardless of the threshold above +webui.content_disposition_format = text/richtext #### Multi-file HTML document/site settings ##### # TODO: UNSUPPORTED in DSpace 7.0. May be re-added in a later release @@ -1513,19 +1544,6 @@ sitemap.dir = ${dspace.dir}/sitemaps # Defaults to "sitemaps", which means they are available at ${dspace.server.url}/sitemaps/ # sitemap.path = sitemaps -# -# Comma-separated list of search engine URLs to 'ping' when a new Sitemap has -# been created. Include everything except the Sitemap URL itself (which will -# be URL-encoded and appended to form the actual URL 'pinged'). -# -sitemap.engineurls = http://www.google.com/webmasters/sitemaps/ping?sitemap= - -# Add this to the above parameter if you have an application ID with Yahoo -# (Replace REPLACE_ME with your application ID) -# http://search.yahooapis.com/SiteExplorerService/V1/updateNotification?appid=REPLACE_ME&url= -# -# No known Sitemap 'ping' URL for MSN/Live search - # Define cron for how frequently the sitemap should refresh. 
# Defaults to running daily at 1:15am # Cron syntax is defined at https://www.quartz-scheduler.org/api/2.3.0/org/quartz/CronTrigger.html @@ -1579,9 +1597,6 @@ log.report.dir = ${dspace.dir}/log # fields at least the date and title fields as specified by the # webui.browse.index.* configuration options below. # -# If you have enabled thumbnails (webui.browse.thumbnail.show), you must also -# include a 'thumbnail' entry in your columns - this is where the thumbnail will be displayed -# # If you want to mark each item include a 'mark_[value]' (without the brackets - replace the word 'value' with anything that # has a meaning for your mark) entry in your columns - this is where the icon will be displayed. # Do not forget to add a Spring bean with id = "org.dspace.app.itemmarking.ItemMarkingExtractor.[value]" @@ -1589,13 +1604,8 @@ log.report.dir = ${dspace.dir}/log # You can add more than one 'mark_[value]' options (with different value) in case you need to mark items more than one time for # different purposes. Remember to add the respective beans in file 'config/spring/api/item-marking.xml'. # -# webui.itemlist.columns = thumbnail, dc.date.issued(date), dc.title, dc.contributor.* +# webui.itemlist.columns = dc.date.issued(date), dc.title, dc.contributor.* # -# You can customise the width of each column with the following line - you can have numbers (pixels) -# or percentages. For the 'thumbnail' column, a setting of '*' will use the max width specified -# for browse thumbnails (webui.browse.thumbnail.maxwidth, thumbnail.maxwidth) -# webui.itemlist.widths = *, 130, 60%, 40% - # Additionally, you can override the DC fields used on the listing page for # a given browse index and/or sort option. 
As a sort option or index may be defined # on a field that isn't normally included in the list, this allows you to display @@ -1605,30 +1615,8 @@ log.report.dir = ${dspace.dir}/log # they are listed below is the priority in which they will be used (so a combination # of an index name and sort name will take precedence over just the browse name). # -# webui.itemlist.browse..sort..columns # webui.itemlist.sort..columns -# webui.itemlist.browse..columns # webui.itemlist..columns -# -# In the last case, a sort option name will always take precedence over a browse -# index name. Note also, that for any additional columns you list, you will need to -# ensure there is an itemlist. entry in the messages file. -# -# The following example would display the date of accession in place of the issue date -# whenever the dateaccessioned browse index or sort option is selected. -# -# Just like webui.itemlist.columns, you will need to include a 'thumbnail' entry to display -# and thumbnails in the item list -# -# webui.itemlist.dateaccessioned.columns = thumbnail, dc.date.accessioned(date), dc.title, dc.contributor.* -# -# As above, you can customise the width of the columns for each configured column list, substituting '.widths' for -# '.columns' in the property name. See the setting for webui.itemlist.widths for more details -# webui.itemlist.dateaccessioned.widths = *, 130, 60%, 40% - -# You can also set the overall size of the item list table with the following setting. It can lead to faster -# table rendering when used with the column widths above, but not generally recommended. -# webui.itemlist.tablewidth = 100% ##### SFX Server (OpenURL) ##### @@ -1666,6 +1654,15 @@ log.report.dir = ${dspace.dir}/log # For more details see https://developers.google.com/analytics/devguides/collection/protocol/ga4 # google.analytics.api-secret = +# Ensures only views of bitstreams in configured bundles result in a GA4 event. 
+# Config can contain multiple bundles for which the bitstream views will result in GA4 events, eg: +# google-analytics.bundles = ORIGINAL, CONTENT +# If config is not set or empty, the default fallback is Constants#CONTENT_BUNDLE_NAME bundle ('ORIGINAL'). +# If config contains 'LICENSE' or 'THUMBNAIL' bundles, it may cause inflated bitstream view numbers. +# Set config to 'none' to disable GA4 bitstream events, eg: +# google-analytics.bundles = none +google-analytics.bundles = ORIGINAL + #################################################################### #---------------------------------------------------------------# #----------------REQUEST ITEM CONFIGURATION---------------------# @@ -1702,7 +1699,7 @@ bte.crossref.apikey = following configuration value. # Note that when apikey is configured by default the service is enabled, see bte.xml for further configuration submission.lookup.ads.apikey = -# In order to use the EPO services you need to obtain an API Key from https://developers.epo.org. Once you get it, add it to the +# In order to use the EPO services you need to obtain an API Key from https://developers.epo.org. Once you get it, add it to the # following configuration value. # Note that when apikey is configured by default the service is enabled, see bte.xml for further configuration epo.consumerKey= @@ -1713,7 +1710,7 @@ vufind.api-search = https://vufind.org/demo/api/v1/search vufind.api-record = https://vufind.org/demo/api/v1/record ############ Researcher Profile configuration ############ -researcher-profile.collection.uuid = +researcher-profile.collection.uuid = researcher-profile.type = Person researcher-profile.hard-delete.enabled = false @@ -1858,6 +1855,33 @@ google.recaptcha.site-verify = https://www.google.com/recaptcha/api/siteverify # checkbox - The "I'm not a robot" Checkbox requires the user to click a checkbox indicating the user is not a robot. 
#google.recaptcha.mode = +#------------------------------------------------------------------# +#---------------REGISTRATION DATA CONFIGURATION--------------------# +#------------------------------------------------------------------# + +# Configuration for the duration of the token depending on the type +# the format used should be compatible with the standard DURATION format, +# but without the prefix `PT`: +# +# - PT1H -> 1H // hours +# - PT1M -> 1M // minutes +# - PT1S -> 1S // seconds +# +eperson.registration-data.token.orcid.expiration = 1H +eperson.registration-data.token.validation_orcid.expiration = 1H +eperson.registration-data.token.forgot.expiration = 24H +eperson.registration-data.token.register.expiration = 24H +eperson.registration-data.token.invitation.expiration = 24H +eperson.registration-data.token.change_password.expiration = 1H + +# Configuration that enables the schedulable tasks related to the registration +# The property `enabled` should be setted to true to enable it. +eperson.registration-data.scheduler.enabled = true +# Configuration for the task that deletes expired registrations. +# Its value should be compatible with the cron format. +# By default it's scheduled to be run every 15 minutes. +eperson.registration-data.scheduler.expired-registration-data.cron = 0 0/15 * * * ? 
+ #------------------------------------------------------------------# #-------------------MODULE CONFIGURATIONS--------------------------# #------------------------------------------------------------------# @@ -1882,6 +1906,10 @@ module_dir = modules vocabulary.plugin.publication-coar-types.hierarchy.preloadLevel = 2 vocabulary.plugin.patent-coar-types.hierarchy.preloadLevel = 2 +# Custom metadata used for layout loading +# TAB instances +dspace.metadata.layout.tab = dc.type + # Bulk export limitations: 0 means bulk export not allowed, -1 means no limitations # admin users (including community & collection admin) bulk-export.limit.admin = -1 @@ -1890,6 +1918,17 @@ bulk-export.limit.loggedIn = -1 # anonymous users bulk-export.limit.notLoggedIn = 0 +#------------------------------------------------------------------# +#-------------SYSTEM ASYNC EVENT CONFIGURATIONS--------------------# +#------------------------------------------------------------------# +# # +# Configurations for the SystemEventService # +# # +#------------------------------------------------------------------# +# By default the asynchronous executor that dispatches events +# has 2 threads on which schedule events +system-event.thread.size = 2 + # Load default module configs # ---------------------------- # To exclude a module configuration, simply comment out its "include" statement. 
@@ -1930,6 +1969,7 @@ include = ${module_dir}/orcid.cfg include = ${module_dir}/rdf.cfg include = ${module_dir}/rest.cfg include = ${module_dir}/iiif.cfg +include = ${module_dir}/signposting.cfg include = ${module_dir}/iiifuploader.cfg include = ${module_dir}/iiifuploader.primary.cfg include = ${module_dir}/rawimagesinpdf.cfg @@ -1961,3 +2001,4 @@ include = ${module_dir}/external-providers.cfg include = ${module_dir}/pushocr.cfg include = ${module_dir}/pushocr.force.cfg include = ${module_dir}/cleanup-authority-metadata-relation.cfg +include = ${module_dir}/ror.cfg diff --git a/dspace/config/emails/change_password b/dspace/config/emails/change_password index eb114feeeb0c..908e494596cc 100644 --- a/dspace/config/emails/change_password +++ b/dspace/config/emails/change_password @@ -4,19 +4,15 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'Change Password Request') +#set($subject = "${config.get('dspace.name')}: Change Password Request") #set($phone = ${config.get('mail.message.helpdesk.telephone')}) -To change the password for your DSpace account, please click the link -below: +To change the password for your ${config.get('dspace.name')} account, please click the link below: ${params[0]} -If you need assistance with your account, please email - - ${config.get("mail.helpdesk")} +If you need assistance with your account, please email ${config.get("mail.helpdesk")} #if( $phone ) - or call us at ${phone}. #end -The DSpace Team +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/doi_maintenance_error b/dspace/config/emails/doi_maintenance_error index 5424432f64ce..a86de915469b 100644 --- a/dspace/config/emails/doi_maintenance_error +++ b/dspace/config/emails/doi_maintenance_error @@ -10,9 +10,11 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = "DSpace: Error ${params[0]} DOI ${params[3]}") +#set($subject = "${config.get('dspace.name')}: Error ${params[0]} DOI ${params[3]}") Date: ${params[1]} ${params[0]} DOI ${params[4]} for ${params[2]} with ID ${params[3]} failed: ${params[5]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/export_error b/dspace/config/emails/export_error index 79468c281e3e..5223f64e3379 100644 --- a/dspace/config/emails/export_error +++ b/dspace/config/emails/export_error @@ -6,14 +6,11 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace - The item export you requested was not completed.') +#set($subject = "${config.get('dspace.name')}: The item export you requested was not completed.") The item export you requested was not completed, due to the following reason: ${params[0]} For more information you may contact your system administrator: ${params[1]} - - -The DSpace Team - +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/export_success b/dspace/config/emails/export_success index b97a3798738e..211e40dd787d 100644 --- a/dspace/config/emails/export_success +++ b/dspace/config/emails/export_success @@ -5,7 +5,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace - Item export requested is ready for download') +#set($subject = "${config.get('dspace.name')}: Item export requested is ready for download") The item export you requested from the repository is now ready for download. You may download the compressed file using the following link: @@ -13,6 +13,4 @@ ${params[0]} This file will remain available for at least ${params[1]} hours. 
- -The DSpace Team - +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/feedback b/dspace/config/emails/feedback index 7998367c264f..5bf83eda760c 100644 --- a/dspace/config/emails/feedback +++ b/dspace/config/emails/feedback @@ -10,7 +10,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'Feedback Form Information') +#set($subject = "${config.get('dspace.name')}: Feedback Form Information") Comments: @@ -24,3 +24,4 @@ Referring Page: ${params[3]} User Agent: ${params[4]} Session: ${params[5]} +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/flowtask_notify b/dspace/config/emails/flowtask_notify index 7e5286e3074c..f277b7f2e79f 100644 --- a/dspace/config/emails/flowtask_notify +++ b/dspace/config/emails/flowtask_notify @@ -7,7 +7,7 @@ ## {4} Task result ## {5} Workflow action taken ## -#set($subject = 'DSpace: Curation Task Report') +#set($subject = "${config.get('dspace.name')}: Curation Task Report") Title: ${params[0]} Collection: ${params[1]} @@ -20,4 +20,4 @@ ${params[4]} Action taken on the submission: ${params[5]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/harvesting_error b/dspace/config/emails/harvesting_error index d14b51fe8235..40e4fa58e844 100644 --- a/dspace/config/emails/harvesting_error +++ b/dspace/config/emails/harvesting_error @@ -8,7 +8,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'DSpace: Harvesting Error') +#set($subject = "${config.get('dspace.name')}: Harvesting Error") Collection ${params[0]} failed on harvest: Date: ${params[1]} @@ -18,3 +18,5 @@ ${params[3]} Exception: ${params[4]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/healthcheck b/dspace/config/emails/healthcheck index bee4d4dec261..bd2ae0be52da 100644 --- a/dspace/config/emails/healthcheck +++ b/dspace/config/emails/healthcheck @@ -3,7 +3,7 @@ ## Parameters: {0} is the output of healthcheck command ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'Repository healthcheck') +#set($subject = "${config.get('dspace.name')}: Repository healthcheck") The healthcheck finished with the following output: ${params[0]} diff --git a/dspace/config/emails/internal_error b/dspace/config/emails/internal_error index ee622f4b3865..266c91b116a1 100644 --- a/dspace/config/emails/internal_error +++ b/dspace/config/emails/internal_error @@ -10,7 +10,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace: Internal Server Error') +#set($subject = "${config.get('dspace.name')}: Internal Server Error") An internal server error occurred on ${params[0]}: Date: ${params[1]} diff --git a/dspace/config/emails/orcid b/dspace/config/emails/orcid new file mode 100644 index 000000000000..f2cd1f50c02c --- /dev/null +++ b/dspace/config/emails/orcid @@ -0,0 +1,22 @@ +## E-mail sent to DSpace users when they try to register with an ORCID account +## +## Parameters: {0} is expanded to a special registration URL +## +## See org.dspace.core.Email for information on the format of this file. 
+## +#set($subject = "${config.get('dspace.name')} Account Registration") +#set($phone = ${config.get('mail.message.helpdesk.telephone')}) +To complete registration for a DSpace account, please click the link +below: + + ${params[0]} + +If you need assistance with your account, please email + + ${config.get("mail.helpdesk")} +#if( $phone ) + +or call us at ${phone}. +#end + +The DSpace-CRIS Team diff --git a/dspace/config/emails/register b/dspace/config/emails/register index 694be449a887..87b005bc99c3 100644 --- a/dspace/config/emails/register +++ b/dspace/config/emails/register @@ -6,17 +6,13 @@ ## #set($subject = "${config.get('dspace.name')} Account Registration") #set($phone = ${config.get('mail.message.helpdesk.telephone')}) -To complete registration for a DSpace account, please click the link -below: +To complete registration for a ${config.get('dspace.name')} account, please click the link below: ${params[0]} -If you need assistance with your account, please email - - ${config.get("mail.helpdesk")} +If you need assistance with your account, please email ${config.get("mail.helpdesk")} #if( $phone ) - or call us at ${phone}. #end -The DSpace Team +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/registration_notify b/dspace/config/emails/registration_notify index 96c87fa63d9c..0627d17fe02c 100644 --- a/dspace/config/emails/registration_notify +++ b/dspace/config/emails/registration_notify @@ -8,10 +8,12 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'DSpace: Registration Notification') +#set($subject = "${config.get('dspace.name')}: Registration Notification") A new user has registered on ${params[0]} at ${params[1]}: Name: ${params[2]} Email: ${params[3]} Date: ${params[4]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.admin b/dspace/config/emails/request_item.admin index c0443c60f8dc..ee8daa510d05 100644 --- a/dspace/config/emails/request_item.admin +++ b/dspace/config/emails/request_item.admin @@ -8,11 +8,13 @@ ## {4} the approver's email address ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'Request for Open Access') +#set($subject = "${config.get('dspace.name')}: Request for Open Access") ${params[3]}, with address ${params[4]}, requested the following document/file to be in Open Access: -Document Handle:${params[1]} +Document Handle: ${params[1]} File ID: ${params[0]} -Token:${params[2]} +Token: ${params[2]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.author b/dspace/config/emails/request_item.author index ac79270e7fbc..eb9c4f38f661 100644 --- a/dspace/config/emails/request_item.author +++ b/dspace/config/emails/request_item.author @@ -11,7 +11,7 @@ ## 8 corresponding author email ## 9 configuration property "dspace.name" ## 10 configuration property "mail.helpdesk" -#set($subject = 'Request copy of document') +#set($subject = "${config.get('dspace.name')}: Request copy of document") Dear ${params[7]}, @@ -21,10 +21,12 @@ This request came along with the following message: "${params[5]}" -To answer, click ${params[6]}. Whether you choose to grant or deny the request, we think that it''s in your best interest to respond. +To answer, click ${params[6]}. Whether you choose to grant or deny the request, we think that it's in your best interest to respond. 
-IF YOU ARE NOT AN AUTHOR OF THIS DOCUMENT, and only submitted the document on the author''s behalf, PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S). Only the author(s) should answer the request to send a copy. +IF YOU ARE NOT AN AUTHOR OF THIS DOCUMENT, and only submitted the document on the author's behalf, PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S). Only the author(s) should answer the request to send a copy. IF YOU ARE AN AUTHOR OF THE REQUESTED DOCUMENT, thank you for your cooperation! If you have any questions concerning this request, please contact ${params[10]}. + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.granted b/dspace/config/emails/request_item.granted new file mode 100644 index 000000000000..37ee5c29bd0c --- /dev/null +++ b/dspace/config/emails/request_item.granted @@ -0,0 +1,26 @@ +## Sent to the person requesting a copy of a restricted document when the +## request is granted. +## +## Parameters: +## {0} name of the requestor +## {1} Handle URL of the requested Item +## {2} title of the requested Item +## {3} name of the grantor +## {4} email address of the grantor (unused) +## {5} custom message sent by the grantor. +#set($subject = 'Request for Copy of Restricted Document is Granted') +Dear ${params[0]}: + +Your request for a copy of the file(s) from the below document has been approved by ${params[3]}. You may find the requested file(s) attached. + + ${params[2]} + ${params[1]} +#if( $params[5] ) + +An additional message from ${params[3]} follows: + +${params[5]} +#end + +Best regards, +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.rejected b/dspace/config/emails/request_item.rejected new file mode 100644 index 000000000000..c5a13860b648 --- /dev/null +++ b/dspace/config/emails/request_item.rejected @@ -0,0 +1,26 @@ +## Sent to the person requesting a copy of a restricted document when the +## request is denied. 
+## +## Parameters: +## {0} name of the requestor +## {1} Handle URL of the requested Item +## {2} title of the requested Item +## {3} name of the grantor +## {4} email address of the grantor (unused) +## {5} custom message sent by the grantor. +#set($subject = 'Request for Copy of Restricted Document is Denied') +Dear ${params[0]}: + +Your request for a copy of the file(s) from the below document has been denied by ${params[3]}. + + ${params[2]} + ${params[1]} +#if( $params[5] ) + +An additional message from ${params[3]} follows: + +${params[5]} +#end + +Best regards, +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_archive b/dspace/config/emails/submit_archive index d3d62f7f4d07..ea1c31a75599 100644 --- a/dspace/config/emails/submit_archive +++ b/dspace/config/emails/submit_archive @@ -4,13 +4,13 @@ ## {1} Name of collection ## {2} handle ## -#set($subject = 'DSpace: Submission Approved and Archived') +#set($subject = "${config.get('dspace.name')}: Submission Approved and Archived") You submitted: ${params[0]} To collection: ${params[1]} -Your submission has been accepted and archived in DSpace, +Your submission has been accepted and archived in ${config.get('dspace.name')}, and it has been assigned the following identifier: ${params[2]} @@ -18,4 +18,4 @@ Please use this identifier when citing your submission. Many thanks! -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_reject b/dspace/config/emails/submit_reject index 44e6cf2cd9f3..f5376cb3a03b 100644 --- a/dspace/config/emails/submit_reject +++ b/dspace/config/emails/submit_reject @@ -6,7 +6,7 @@ ## {3} Reason for the rejection ## {4} Link to 'My DSpace' page ## -#set($subject = 'DSpace: Submission Rejected') +#set($subject = "${config.get('dspace.name')}: Submission Rejected") You submitted: ${params[0]} @@ -17,7 +17,6 @@ with the following explanation: ${params[3]} -Your submission has not been deleted. 
You can access it from your -"My DSpace" page: ${params[4]} +Your submission has not been deleted. You can access it from your "My${config.get('dspace.shortname')}" page: ${params[4]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_task b/dspace/config/emails/submit_task index 8c8b4a7e7245..f68bac80b186 100644 --- a/dspace/config/emails/submit_task +++ b/dspace/config/emails/submit_task @@ -6,7 +6,7 @@ ## {3} Description of task ## {4} link to 'my DSpace' page ## -#set($subject = 'DSpace: You have a new task') +#set($subject = "${config.get('dspace.name')}: You have a new task") A new item has been submitted: @@ -16,9 +16,9 @@ Submitted by: ${params[2]} ${params[3]} -To claim this task, please visit your "My DSpace" +To claim this task, please visit your "My${config.get('dspace.shortname')}" page: ${params[4]} Many thanks! -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/subscriptions_content b/dspace/config/emails/subscriptions_content index 2b8289af278c..9d2d255c06d3 100644 --- a/dspace/config/emails/subscriptions_content +++ b/dspace/config/emails/subscriptions_content @@ -1,16 +1,28 @@ ## E-mail sent to designated address about updates on subscribed items ## -## Parameters: {0} Collections updates -## {1} Communities updates -#set($subject = 'DSpace: Your Content Subscriptions') +## Parameters: {0} Link to subscriptions page +## {1} Collections updates block +## {2} Communities updates block +## {3} Entity updates block +#set($subject = "${config.get('dspace.name')}: Statistics of updates on subscribed items") -This email is sent from DSpace-CRIS based on the chosen subscription preferences. +This email is sent from ${config.get('dspace.name')} based on the chosen subscription preferences. 
+You can manage your subscription preferences from ${params[0]} +#if( not( "$params[1]" == "" )) Communities ------------ -List of changed items : ${params[0]} +------------------- +${params[1]} +#end +#if( not( "$params[2]" == "" )) Collections ------------ -List of changed items : ${params[1]} +------------------- +${params[2]} +#end +#if( not( "$params[3]" == "" )) +Entities +------------------- +${params[3]} +#end \ No newline at end of file diff --git a/dspace/config/emails/validation_orcid b/dspace/config/emails/validation_orcid new file mode 100644 index 000000000000..ec11b708ec5d --- /dev/null +++ b/dspace/config/emails/validation_orcid @@ -0,0 +1,22 @@ +## E-mail sent to DSpace users when they confirm the orcid email address for the account +## +## Parameters: {0} is expanded to a special registration URL +## +## See org.dspace.core.Email for information on the format of this file. +## +#set($subject = "${config.get('dspace.name')} Account Registration") +#set($phone = ${config.get('mail.message.helpdesk.telephone')}) +To confirm your email and create the needed account, please click the link +below: + + ${params[0]} + +If you need assistance with your account, please email + + ${config.get("mail.helpdesk")} +#if( $phone ) + +or call us at ${phone}. +#end + +The DSpace-CRIS Team diff --git a/dspace/config/emails/welcome b/dspace/config/emails/welcome index febc082e072e..1c22cb05e830 100644 --- a/dspace/config/emails/welcome +++ b/dspace/config/emails/welcome @@ -3,13 +3,12 @@ ## See org.dspace.core.Email for information on the format of this file. ## #set($subject = "Welcome new registered ${config.get('dspace.name')} user!") -Thank you for registering an account. Your new account can be used immediately +Thank you for registering an account. Your new account can be used immediately to subscribe to notices of new content arriving in collections of your choice. 
Your new account can also be granted privileges to submit new content, or to edit and/or approve submissions. -If you need assistance with your account, please email -${config.get("mail.admin")}. +If you need assistance with your account, please email ${config.get("mail.helpdesk")}. The ${config.get('dspace.name')} Team diff --git a/dspace/config/entities/merge-relationship-types.xml b/dspace/config/entities/merge-relationship-types.xml new file mode 100644 index 000000000000..8db947319542 --- /dev/null +++ b/dspace/config/entities/merge-relationship-types.xml @@ -0,0 +1,126 @@ + + + + + + Project + Project + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Person + Person + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Funding + Funding + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + OrgUnit + OrgUnit + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Journal + Journal + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Publication + Publication + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Product + Product + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Patent + Patent + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Event + Event + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Equipment + Equipment + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + \ No newline at end of file diff --git a/dspace/config/hibernate.cfg.xml b/dspace/config/hibernate.cfg.xml index 563fd86735bc..9a3cdd967f0f 100644 --- a/dspace/config/hibernate.cfg.xml +++ b/dspace/config/hibernate.cfg.xml @@ -69,6 +69,7 @@ + @@ -56,11 +66,11 @@ oairecerif author affiliation - false onebox - Enter the affiliation of the author as stated on the publication. + false + Enter the affiliation of the author as stated on the publication. @@ -70,11 +80,11 @@ dc contributor editor - false onebox - The editors of this publication. + false You must enter at least the author. + The editors of this publication. 
@@ -82,11 +92,11 @@ oairecerif editor affiliation - false onebox - Enter the affiliation of the editor as stated on the publication. + false + Enter the affiliation of the editor as stated on the publication. @@ -96,11 +106,11 @@ dc relation funding - false onebox - Enter the name of funding, if any, that has supported this publication + false You must enter at least the funding name. + Enter the name of funding, if any, that has supported this publication @@ -108,11 +118,11 @@ dc relation grantno - false onebox - If the funding is not found in the system please enter the funding identifier / grant no + false + If the funding is not found in the system please enter the funding identifier / grant no @@ -122,11 +132,11 @@ dc relation funding - false onebox - Enter the name of funding, if any, that has supported this product + false You must enter at least the funding name. + Enter the name of funding, if any, that has supported this product @@ -134,11 +144,37 @@ dc relation grantno - false onebox + false + If the funding is not found in the system please enter the funding identifier / grant no + + + +
      + + + dc + relation + publication + + onebox + false + You must enter at least the publication title / citation + + Enter the publication title or citation, if any, that uses this product + + + dc + relation + doi + + onebox + false + + If the publication is not found in the system please enter the DOI identifier
      @@ -148,11 +184,11 @@ dc relation funding - false onebox - Enter the name of funding, if any, that has supported this patent + false You must enter at least the funding name. + Enter the name of funding, if any, that has supported this patent @@ -160,11 +196,59 @@ dc relation grantno - false onebox + false + If the funding is not found in the system please enter the funding identifier / grant no + + + +
      + + + crispatent + document + kind + + onebox + false + You must enter the kind code. + + + + + crispatent + document + issueDate + + onebox + false + You must enter the publication date. + + + + + crispatent + document + title + + onebox + false + + + + + + crispatent + document + description + + textarea + false + +
      @@ -174,45 +258,45 @@ oairecerif affiliation role - false - onebox - + false + + oairecerif person affiliation - false - onebox - + false You must enter at least the organisation of your affiliation. + + oairecerif affiliation startDate - false - date - + false + + oairecerif affiliation endDate - false - date - + false + + @@ -221,45 +305,45 @@ crisrp qualification - false - onebox - + false You must enter the organisation + + crisrp qualification role - false - onebox - + false You must enter the qualification title. + + crisrp qualification start - false - date - + false + + crisrp qualification end - false - date - + false + + @@ -269,44 +353,44 @@ crisrp education role - false - onebox - + false You must enter the degree/title + + crisrp education - false - onebox - + false You must enter the organisation + + crisrp education start - false - date - + false + + crisrp education end - false - date - + false + + @@ -316,11 +400,11 @@ dc contributor author - false - name - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + onebox + false You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. @@ -328,11 +412,11 @@ oairecerif author affiliation - false onebox - Enter the affiliation of the author as stated on the publication. + false + Enter the affiliation of the author as stated on the publication. @@ -342,11 +426,11 @@ dc contributor author - false - name - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + onebox + false You must enter at least the inventor. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. @@ -354,11 +438,11 @@ oairecerif author affiliation - false onebox - Enter the affiliation of the author as stated on the publication. 
+ false + Enter the affiliation of the author as stated on the publication. @@ -368,11 +452,11 @@ oairecerif identifier url - false onebox - + false You must enter at least the site url. + @@ -380,11 +464,11 @@ crisrp site title - false onebox - + false + @@ -394,55 +478,55 @@ dc identifier issn - false onebox - + false + dc title - false onebox - + false You must enter a main title for this item. + dc publisher - false onebox - + false + dc subject - true tag - + true + dc description - false textarea - + false + @@ -451,110 +535,23 @@ dc identifier - true qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - - - - - - dc - title - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - dc - title - alternative - true - - onebox - If the item has any alternative titles, please enter them here. - - - - - - dc - date - issued - false - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - You must enter at least the year. - - - - - dc - contributor - author - true - - group - Enter the names of the authors of this item. - - - - - - dc - contributor - editor true - - group - The editors of this publication. - - - - - dc - type - false - - onebox - Select the type(s) of content of the item. - You must select a publication type - publication-coar-types - - - -
      - - - dc - identifier - true - - qualdrop_value If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - +it, please enter the types and the actual numbers or codes. dc title - false onebox - Enter the main title of the item. + false You must enter a main title for this item. + Enter the main title of the item. @@ -562,150 +559,11 @@ dc title alternative - true onebox - If the item has any alternative titles, please enter them here. - - - - - - dc - date - issued - false - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - You must enter at least the year. - - - - - dc - contributor - author - true - - group - Enter the names of the authors of this item. - - - - - - dc - contributor - group - true - - onebox - The editors of this publication. - - - - - - dc - type - false - - onebox - Select the type(s) of content of the item. - You must select a publication type - publication-coar-types - - -
      -
      - - - dc - contributor - author - false - - onebox - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - You must enter at least the author. - - - - - oairecerif - author - affiliation - false - - onebox - Enter the affiliation of the author as stated on the publication. - - - -
      -
      - - - dc - contributor - editor - false - - onebox - The editors of this publication. - You must enter at least the author. - - - - - oairecerif - editor - affiliation - false - - onebox - Enter the affiliation of the editor as stated on the publication. - - - -
      -
      - - - dc - identifier true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - - - - - dc - title - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - dc - title - alternative - true - - onebox If the item has any alternative titles, please enter them here. - @@ -713,13 +571,13 @@ dc date issued - false date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. + false You must enter at least the year. + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. @@ -727,113 +585,60 @@ dc contributor author - true group - Enter the names of the authors of this item. - - - - - - dc - contributor - group true - - onebox - The editors of this publication. - - - - - - dc - type - false - - onebox - Select the type(s) of content of the item. - You must select a publication type - publication-coar-types - - -
      -
      - - - dc - contributor - author - false - - onebox - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - You must enter at least the author. - - - - - oairecerif - author - affiliation - false - - onebox - Enter the affiliation of the author as stated on the publication. + Enter the names of the authors of this item. -
      -
      dc contributor editor - false - - onebox + + group + true + The editors of this publication. - You must enter at least the author. - oairecerif - editor - affiliation - false - + dc + type + onebox - Enter the affiliation of the editor as stated on the publication. - + false + You must select a publication type + Select the type(s) of content of the item. + publication-coar-types
      -
      dc language iso - false dropdown - Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. + false + Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. dc subject - true tag - Enter appropriate subject keywords or phrases. + true + Enter appropriate subject keywords or phrases. @@ -841,11 +646,11 @@ datacite subject fos - true onebox - + true + oecd @@ -854,11 +659,11 @@ dc description abstract - false textarea - Enter the abstract of the item. + false + Enter the abstract of the item. @@ -868,12 +673,12 @@ dc relation publication - false - publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 onebox - The publication where this publication is included. E.g. a book chapter lists here the book, a contribution to a conference lists here the conference proceeding. + false + The publication where this publication is included. E.g. a book chapter lists here the book, a contribution to a conference lists here the conference proceeding. 
+ publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 @@ -881,12 +686,12 @@ dc relation isbn - false - publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 onebox - The ISBN of the book/report if it was not found in the system + false + The ISBN of the book/report if it was not found in the system + publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 @@ -894,12 +699,12 @@ dc relation doi - false - publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 onebox - The DOI of the book/report if it was not found in the system + false + The DOI of the book/report if it was not found in the system + publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 @@ -907,11 +712,11 @@ dc relation journal - false onebox - The journal or Serie where this publication has been published + false + The journal or Serie where this publication has been published @@ -919,11 +724,11 @@ dc relation ispartofseries - true series - Enter the series and number assigned to this item by your community. + true + Enter the series and number assigned to this item by your community. 
@@ -931,11 +736,11 @@ dc relation issn - false onebox - The journal or Serie ISSN if it was not found in the system + false + The journal or Serie ISSN if it was not found in the system @@ -943,12 +748,12 @@ dc coverage publication - false - publication-coar-types:c_efa0,publication-coar-types:c_ba08 onebox - The publication object of the review + false + The publication object of the review + publication-coar-types:c_efa0,publication-coar-types:c_ba08 @@ -956,12 +761,12 @@ dc coverage isbn - false - publication-coar-types:c_efa0,publication-coar-types:c_ba08 onebox - The ISBN of the reviewed item if it was not found in the system + false + The ISBN of the reviewed item if it was not found in the system + publication-coar-types:c_efa0,publication-coar-types:c_ba08 @@ -969,24 +774,12 @@ dc coverage doi - false - publication-coar-types:c_efa0,publication-coar-types:c_ba08 - onebox - The DOI of the reviewed item if it was not found in the system - - - - - - dc - description - sponsorship - true - onebox - Enter the name of any sponsors. 
+ false + The DOI of the reviewed item if it was not found in the system + publication-coar-types:c_efa0,publication-coar-types:c_ba08 @@ -994,11 +787,11 @@ oaire citation volume - false onebox - If applicable, the volume of the publishing channel where this publication appeared + false + If applicable, the volume of the publishing channel where this publication appeared @@ -1006,11 +799,11 @@ oaire citation issue - false onebox - If applicable, the issue of the publishing channel where this publication appeared + false + If applicable, the issue of the publishing channel where this publication appeared @@ -1018,11 +811,11 @@ oaire citation startPage - false onebox - If applicable, the page where this publication starts + false + If applicable, the page where this publication starts @@ -1030,11 +823,11 @@ oaire citation endPage - false onebox - If applicable, the page where this publication ends + false + If applicable, the page where this publication ends @@ -1044,11 +837,11 @@ dc relation funding - true group - Acknowledge the funding received for this publication. + true + Acknowledge the funding received for this publication. @@ -1056,11 +849,11 @@ dc relation project - true onebox - Enter the name of project, if any, that has produced this publication. It is NOT necessary to list the projects connected with an acknowledge funding. + true + Enter the name of project, if any, that has produced this publication. It is NOT necessary to list the projects connected with an acknowledge funding. @@ -1068,11 +861,11 @@ dc relation conference - true onebox - Enter the name of the conference where the item has been presented, if any. + true + Enter the name of the conference where the item has been presented, if any. 
@@ -1080,11 +873,11 @@ dc relation product - true onebox - Link the item to one or more existent dataset in the repository used or described by the publication or, put here the dataset citation + true + Link the item to one or more existent dataset in the repository used or described by the publication or, put here the dataset citation @@ -1092,22 +885,22 @@ dc identifier citation - false onebox - Enter the standard citation for the previously issued instance of this item. + false + Enter the standard citation for the previously issued instance of this item. dc description - false textarea - Enter any other description or comments in this box. + false + Enter any other description or comments in this box. @@ -1115,11 +908,11 @@ dc description sponsorship - true onebox - Enter the name of any sponsors. + true + Enter the name of any sponsors. @@ -1128,23 +921,23 @@ dc identifier - true qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. + true + If the item has any identification numbers or codes associated with +it, please enter the types and the actual numbers or codes. dc title - false onebox - Enter the main title of the item. + false You must enter a main title for this item. + Enter the main title of the item. @@ -1152,11 +945,11 @@ dc title alternative - true onebox - If the item has any alternative titles, please enter them here. + true + If the item has any alternative titles, please enter them here. @@ -1164,11 +957,11 @@ dc date issued - false date - Please give the date of previous publication or public distribution. You can leave out the day and/or month if they aren't applicable. + false You must enter at least the year. + Please give the date of previous publication or public distribution. You can leave out the day and/or month if they aren't applicable. 
@@ -1176,11 +969,11 @@ dc description version - false onebox - If applicable, the version of the product + false + If applicable, the version of the product @@ -1188,22 +981,22 @@ dc contributor author - true group - Enter the names of the authors of this item. + true + Enter the names of the authors of this item. dc type - false onebox - Nothing to do here. Note for administrators, this metadata could be completely hide using template item + false + Nothing to do here. Note for administrators, this metadata could be completely hide using template item product-coar-types @@ -1214,22 +1007,22 @@ dc language iso - false dropdown - Select, if applicable, the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. + false + Select, if applicable, the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. dc subject - true tag - Enter appropriate subject keywords or phrases. + true + Enter appropriate subject keywords or phrases. @@ -1237,11 +1030,11 @@ datacite subject fos - true onebox - + true + oecd @@ -1250,11 +1043,22 @@ dc description abstract - false textarea + false + Enter the abstract of the item. + + + + + dc + description + + textarea + false + Enter any other relevant information about the dataset. @@ -1263,11 +1067,11 @@ dc publisher - true onebox - The publisher or publishers of this product + true + The publisher or publishers of this product @@ -1275,11 +1079,11 @@ dc relation ispartofseries - true series - Link to the research output of which this product is a part (e.g. a data set collection that contains it). + true + Link to the research output of which this product is a part (e.g. 
a data set collection that contains it). @@ -1287,11 +1091,11 @@ dc relation issn - false onebox - The journal or Serie ISSN if it was not found in the system + false + The journal or Serie ISSN if it was not found in the system @@ -1299,11 +1103,11 @@ dc relation funding - true group - Acknowledge the funding received for this product. + true + Acknowledge the funding received for this product. @@ -1311,11 +1115,11 @@ dc relation project - true onebox - Enter the name of project, if any, that has produced this product. It is NOT necessary to list the projects connected with an acknowledge funding. + true + Enter the name of project, if any, that has produced this product. It is NOT necessary to list the projects connected with an acknowledge funding. @@ -1323,11 +1127,11 @@ dc relation conference - false onebox - The event where this product was presented or that is recorded in the product. + false + The event where this product was presented or that is recorded in the product. @@ -1335,11 +1139,11 @@ dc relation equipment - true onebox - The equipment that generated this product + true + The equipment that generated this product @@ -1347,11 +1151,11 @@ dc relation references - true onebox - Result outputs that are referenced by this product + true + Result outputs that are referenced by this product @@ -1359,11 +1163,11 @@ dc relation publication + + inline-group true - - onebox - Result outputs that use this product + Result outputs that use this product @@ -1373,66 +1177,75 @@ dc identifier patentno - false onebox - The patent number + false + The patent number + + crispatent + kind + + onebox + false + + The kind code + dc identifier applicationnumber - false onebox - The application number + false + The application number - dcterms - dateSubmitted - false - + dc + date + issued + date - Date on which the application was physically received at the Patent Authority. 
Also named Filling Date + false + Date on which the application was physically received at the Patent Authority. Also named Filling Date. dc title - false onebox - The title of the patent + false You must specify a title for the patent + The title of the patent dcterms dateAccepted - false date - Date on which the application has been granted by the Patent Office. + false + Date on which the application has been granted by the Patent Office. - dc - date - issued - false - + dcterms + dateSubmitted + date - Date of making available to the public by printing or similar process of a patent document on which grant has taken place on or before the said date + false + Date of making available to the public by printing or similar process of a patent document on which grant has taken place on or before the said date. @@ -1440,47 +1253,59 @@ dc contributor author - true group - The inventor: The actual devisor of an invention that is the subject of a patent. + true + The inventor: The actual devisor of an invention that is the subject of a patent. dcterms rightsHolder - true onebox - The holders of this patent + true + The holders of this patent dc publisher - true onebox - The issuer of the patent: the patent office + true + The issuer of the patent: the patent office dc type - false onebox - Select the type of content of the patent. + false You must select a patent type + Select the type of content of the patent. patent-coar-types + + + crispatent + document + kind + + inline-group + true + + + +
      @@ -1488,22 +1313,35 @@ dc language iso - false dropdown - Select the country and its language. + false + Select the country and its language. dc subject - true onebox + true + Enter appropriate subject keywords or phrases. + + + + + datacite + subject + fos + + onebox + true + + oecd @@ -1511,11 +1349,11 @@ dc description abstract - false textarea - Enter the description of the patent. + false + Enter the description of the patent.
      @@ -1525,11 +1363,11 @@ dc relation funding - true group - Acknowledge the funding received for this patent. + true + Acknowledge the funding received for this patent. @@ -1537,11 +1375,11 @@ dc relation project - true onebox - Enter the name of project, if any, that has produced this patent. It is NOT necessary to list the projects connected with an acknowledge funding. + true + Enter the name of project, if any, that has produced this patent. It is NOT necessary to list the projects connected with an acknowledge funding. @@ -1549,11 +1387,11 @@ dc relation patent - true onebox - Patents that precede (i.e., have priority over) this patent + true + Patents that precede (i.e., have priority over) this patent @@ -1561,11 +1399,11 @@ dc relation references - true onebox - Result outputs that are referenced by this patent + true + Result outputs that are referenced by this patent @@ -1574,22 +1412,22 @@ dc title - false name - + false You must enter least at the Surname. + crisrp name - false name - + false + @@ -1597,11 +1435,11 @@ crisrp name translated - false name - + false + @@ -1609,84 +1447,84 @@ crisrp name variant - true name - + true + person givenName - false onebox - + false + person familyName - false onebox - + false + person birthDate - false date - + false + oairecerif person gender - false dropdown - + false + person jobTitle - false onebox - + false + person affiliation name - false onebox - + false + crisrp workgroup - true onebox - + true + @@ -1694,33 +1532,33 @@ oairecerif identifier url - true group - + true + person email - false onebox - + false + dc subject - true tag - + true + @@ -1728,11 +1566,11 @@ datacite subject fos - true onebox - + true + oecd @@ -1741,11 +1579,11 @@ person identifier orcid - false onebox - Settable by connecting the entity with ORCID + false + Settable by connecting the entity with ORCID all @@ -1754,11 +1592,11 @@ person identifier scopus-author-id - true onebox - + true + @@ -1766,11 +1604,11 @@ person 
identifier rid - true onebox - + true + @@ -1778,11 +1616,11 @@ oairecerif person affiliation - true inline-group - + true + @@ -1790,55 +1628,55 @@ dc description abstract - false textarea - + false + crisrp education - true inline-group - + true + crisrp country - false dropdown - + false + crisrp qualification - true inline-group - + true + person knowsLanguage - true dropdown - + true + @@ -1846,11 +1684,11 @@ cris policy eperson - false onebox - + false + @@ -1858,11 +1696,11 @@ cris policy group - false onebox - + false + @@ -1871,77 +1709,77 @@ dc title - false onebox - + false You must enter the oganization name. + oairecerif acronym - false onebox - + false + organization parentOrganization - false onebox - + false + crisou director - false onebox - + false + organization foundingDate - false date - + false + crisou boards - true onebox - + true + organization identifier - true qualdrop_value - + true + @@ -1949,11 +1787,11 @@ oairecerif identifier url - true onebox - + true + @@ -1961,11 +1799,11 @@ dc description abstract - false textarea - + false + @@ -1973,32 +1811,32 @@ organization address addressLocality - false onebox - + false + organization address addressCountry - false dropdown - + false + dc type - false dropdown - + false You must specify the organisation type + @@ -2007,88 +1845,88 @@ dc title - false onebox - + false You must enter the project name. + oairecerif acronym - false onebox - + false + crispj coordinator - true onebox - + true + oairecerif internalid - false onebox - + false + crispj partnerou - true onebox - + true + crispj investigator - false onebox - + false You must enter the project coordinator. 
+ crispj openaireid - false onebox - + false + crispj organization - true onebox - + true + @@ -2096,32 +1934,32 @@ oairecerif identifier url - true onebox - + true + oairecerif oamandate - false dropdown - + false + oairecerif oamandate url - false onebox - + false + @@ -2129,21 +1967,21 @@ oairecerif project startDate - false date - + false + oairecerif project endDate - false date - + false + @@ -2151,22 +1989,22 @@ oairecerif project status - false onebox - + false + dc type - false dropdown - + false + @@ -2174,33 +2012,33 @@ dc description abstract - false textarea - + false + crispj coinvestigators - true onebox - + true + dc subject - true tag - + true + @@ -2208,11 +2046,11 @@ datacite subject fos - true onebox - + true + oecd @@ -2221,11 +2059,11 @@ dc relation equipment - true onebox - + true + @@ -2234,31 +2072,31 @@ dc title - false onebox - + false You must enter the equipment name. + oairecerif acronym - false onebox - + false + oairecerif internalid - false onebox - + false + @@ -2266,31 +2104,31 @@ dc relation project - false onebox - + false + oairecerif funder - false onebox - + false + oairecerif fundingParent - false onebox - Link this funding with its upper level if applicable + false + Link this funding with its upper level if applicable @@ -2298,53 +2136,65 @@ crisfund award url - false onebox - + false The url preferably on the funder website of the award notice + + + + + + crisfund + award + uri + + onebox + false + The Award URI + oairecerif oamandate - false dropdown - + false + oairecerif oamandate url - false onebox - + false + oairecerif amount - false onebox - + false + oairecerif amount currency - false dropdown - + false + @@ -2352,97 +2202,97 @@ oairecerif funding identifier - false onebox - + false + oairecerif funding startDate - false date - + false + oairecerif funding endDate - false date - + false + dc type - false dropdown - + false + dc description - false textarea - + false + crisfund investigators - true onebox - + true 
+ crisfund coinvestigators - true onebox - + true + crisfund leadorganizations - true onebox - + true + crisfund leadcoorganizations - true onebox - + true + @@ -2451,66 +2301,66 @@ dc title - false onebox - + false You must enter the equipment name. + oairecerif acronym - false onebox - + false + oairecerif internalid - false onebox - + false + crisequipment ownerou - false onebox - + false + crisequipment ownerrp - false onebox - + false + dc description - false textarea - + false + @@ -2519,33 +2369,33 @@ dc title - false onebox - + false + oairecerif acronym - false onebox - + false + dc type - false dropdown - + false + @@ -2553,21 +2403,21 @@ oairecerif event startDate - false date - + false + oairecerif event endDate - false date - + false + @@ -2575,11 +2425,11 @@ oairecerif event place - false onebox - + false + @@ -2587,77 +2437,77 @@ oairecerif event country - false dropdown - + false + crisevent organizerou - true onebox - + true + crisevent organizerpj - true onebox - + true + crisevent sponsorou - true onebox - + true + crisevent sponsorpj - true onebox - + true + crisevent partnerou - true onebox - + true + crisevent partnerpj - true onebox - + true + @@ -2665,22 +2515,22 @@ dc description abstract - false textarea - + false + dc subject - true tag - + true + @@ -2689,11 +2539,11 @@ cris owner - false onebox - + false + @@ -2854,6 +2704,10 @@ + + N/A + + English (United States) en_US @@ -3252,6 +3106,10 @@ + + Unspecified + + Academic Institute Academic Institute @@ -3306,6 +3164,10 @@ + + Unspecified + + basic research basic research @@ -3320,6 +3182,10 @@ + + Unspecified + + Gift Gift @@ -3342,6 +3208,10 @@ + + Unspecified + + Conference Conference @@ -3352,6 +3222,10 @@ + + Unspecified + + Afghanistan AF @@ -4350,6 +4224,10 @@ + + Unspecified + + Logo logo diff --git a/dspace/etc/conftool/cris-layout-configuration.xls b/dspace/etc/conftool/cris-layout-configuration.xls index 6a7d9daf8e80..d9c9ab9f0090 100644 Binary files 
a/dspace/etc/conftool/cris-layout-configuration.xls and b/dspace/etc/conftool/cris-layout-configuration.xls differ diff --git a/dspace/etc/migration/dspace_cris_migration_post_import.kjb b/dspace/etc/migration/dspace_cris_migration_post_import.kjb index b766c5d3723a..0c18927b5849 100644 --- a/dspace/etc/migration/dspace_cris_migration_post_import.kjb +++ b/dspace/etc/migration/dspace_cris_migration_post_import.kjb @@ -381,8 +381,8 @@ N Y 0 - 1448 - 112 + 1024 + 304 @@ -537,10 +537,49 @@ N Y 0 - 1290 + 1248 112 + + UPDATE dc_date_accessioned + + TRANS + + filename + + ${Internal.Entry.Current.Directory}/update_dc_date_accessioned.ktr + + N + N + N + N + N + N + + + N + N + Basic + N + + N + Y + N + N + N + Pentaho local + N + + Y + + N + Y + 1 + 1248 + 304 + + @@ -590,8 +629,17 @@ Migrate doi2item - Successo + UPDATE dc_date_accessioned 0 + 1 + Y + Y + N + + + UPDATE dc_date_accessioned + Successo + 1 0 Y Y diff --git a/dspace/etc/migration/migration_configuration.xls b/dspace/etc/migration/migration_configuration.xls index fe798d79a9b1..ae104fa6ce02 100644 Binary files a/dspace/etc/migration/migration_configuration.xls and b/dspace/etc/migration/migration_configuration.xls differ diff --git a/dspace/etc/migration/update_dc_date_accessioned.ktr b/dspace/etc/migration/update_dc_date_accessioned.ktr new file mode 100644 index 000000000000..85be8c3c05b0 --- /dev/null +++ b/dspace/etc/migration/update_dc_date_accessioned.ktr @@ -0,0 +1,634 @@ + + + + udpate_dc_date_accessioned + + + + Normal + / + + + + + + + + + + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + TRANSNAME + Y + TRANSNAME + + + STATUS + Y + STATUS + + + LINES_READ + Y + LINES_READ + + + + LINES_WRITTEN + Y + LINES_WRITTEN + + + + LINES_UPDATED + Y + LINES_UPDATED + + + + LINES_INPUT + Y + LINES_INPUT + + + + LINES_OUTPUT + Y + LINES_OUTPUT + + + + LINES_REJECTED + Y + LINES_REJECTED + + + + ERRORS + Y + ERRORS + + + STARTDATE + Y + STARTDATE + + + ENDDATE + Y + ENDDATE + + + LOGDATE + 
Y + LOGDATE + + + DEPDATE + Y + DEPDATE + + + REPLAYDATE + Y + REPLAYDATE + + + LOG_FIELD + Y + LOG_FIELD + + + EXECUTING_SERVER + N + EXECUTING_SERVER + + + EXECUTING_USER + N + EXECUTING_USER + + + CLIENT + N + CLIENT + + + + + +
      + + + + ID_BATCH + Y + ID_BATCH + + + SEQ_NR + Y + SEQ_NR + + + LOGDATE + Y + LOGDATE + + + TRANSNAME + Y + TRANSNAME + + + STEPNAME + Y + STEPNAME + + + STEP_COPY + Y + STEP_COPY + + + LINES_READ + Y + LINES_READ + + + LINES_WRITTEN + Y + LINES_WRITTEN + + + LINES_UPDATED + Y + LINES_UPDATED + + + LINES_INPUT + Y + LINES_INPUT + + + LINES_OUTPUT + Y + LINES_OUTPUT + + + LINES_REJECTED + Y + LINES_REJECTED + + + ERRORS + Y + ERRORS + + + INPUT_BUFFER_ROWS + Y + INPUT_BUFFER_ROWS + + + OUTPUT_BUFFER_ROWS + Y + OUTPUT_BUFFER_ROWS + + + + + +
      + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + LOG_DATE + Y + LOG_DATE + + + LOGGING_OBJECT_TYPE + Y + LOGGING_OBJECT_TYPE + + + OBJECT_NAME + Y + OBJECT_NAME + + + OBJECT_COPY + Y + OBJECT_COPY + + + REPOSITORY_DIRECTORY + Y + REPOSITORY_DIRECTORY + + + FILENAME + Y + FILENAME + + + OBJECT_ID + Y + OBJECT_ID + + + OBJECT_REVISION + Y + OBJECT_REVISION + + + PARENT_CHANNEL_ID + Y + PARENT_CHANNEL_ID + + + ROOT_CHANNEL_ID + Y + ROOT_CHANNEL_ID + + + + + +
      + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + LOG_DATE + Y + LOG_DATE + + + TRANSNAME + Y + TRANSNAME + + + STEPNAME + Y + STEPNAME + + + STEP_COPY + Y + STEP_COPY + + + LINES_READ + Y + LINES_READ + + + LINES_WRITTEN + Y + LINES_WRITTEN + + + LINES_UPDATED + Y + LINES_UPDATED + + + LINES_INPUT + Y + LINES_INPUT + + + LINES_OUTPUT + Y + LINES_OUTPUT + + + LINES_REJECTED + Y + LINES_REJECTED + + + ERRORS + Y + ERRORS + + + LOG_FIELD + N + LOG_FIELD + + + + + +
      + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + LOG_DATE + Y + LOG_DATE + + + METRICS_DATE + Y + METRICS_DATE + + + METRICS_CODE + Y + METRICS_CODE + + + METRICS_DESCRIPTION + Y + METRICS_DESCRIPTION + + + METRICS_SUBJECT + Y + METRICS_SUBJECT + + + METRICS_TYPE + Y + METRICS_TYPE + + + METRICS_VALUE + Y + METRICS_VALUE + + + + + +
      + + 0.0 + 0.0 + + 10000 + 50 + 50 + N + Y + 50000 + Y + + N + 1000 + 100 + + + + + + + + + - + 2021/07/23 16:30:09.987 + - + 2021/07/23 16:30:09.987 + H4sIAAAAAAAAAAMAAAAAAAAAAAA= + N + + + + + dspace + ${db_host_name} + POSTGRESQL + Native + ${db_name} + ${db_port_number} + ${db_username} + ${db_password} + + + + + + EXTRA_OPTION_POSTGRESQL.stringtype + unspecified + + + FORCE_IDENTIFIERS_TO_LOWERCASE + N + + + FORCE_IDENTIFIERS_TO_UPPERCASE + N + + + IS_CLUSTERED + N + + + PORT_NUMBER + ${db_port_number} + + + PRESERVE_RESERVED_WORD_CASE + Y + + + QUOTE_ALL_FIELDS + N + + + SUPPORTS_BOOLEAN_DATA_TYPE + Y + + + SUPPORTS_TIMESTAMP_DATA_TYPE + Y + + + USE_POOLING + N + + + + + + Generate rows + Execute SQL script + Y + + + + Execute SQL script + ExecSQL + + Y + + 1 + + none + + + dspace + N + Y + N + N +DO $$ +DECLARE + row_data record; + metadataFieldLegacyId INT; + metadataFieldDateId INT; + row_count integer := 0; +BEGIN + + SELECT m.metadata_field_id + INTO metadataFieldLegacyId + FROM metadatafieldregistry m JOIN metadataschemaregistry s + ON m.metadata_schema_id = s.metadata_schema_id + WHERE s.short_id = 'cris' AND m.element = 'legacyId'; + + SELECT m.metadata_field_id + INTO metadataFieldDateId + FROM metadatafieldregistry m JOIN metadataschemaregistry s + ON m.metadata_schema_id = s.metadata_schema_id + WHERE s.short_id = 'dc' AND m.element = 'date' AND m.qualifier = 'accessioned'; + + FOR row_data IN ( + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_do d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + UNION ALL + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_rpage d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + UNION ALL + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_project d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + UNION 
ALL + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_orgunit d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + ) + LOOP + UPDATE metadatavalue + SET text_value = TO_CHAR(row_data.timestampcreated, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') + WHERE dspace_object_id = row_data.dspace_object_id + AND metadata_field_id = metadataFieldDateId; + + RAISE INFO 'Updating DSpace object with uuid %', row_data.dspace_object_id; + RAISE INFO 'New date is %', TO_CHAR(row_data.timestampcreated, 'YYYY-MM-DD"T"HH24:MI:SS"Z"'); + + row_count := row_count + 1; + END LOOP; + RAISE NOTICE 'Total items processed: %', row_count; +END $$; + + N + + + + + + + + + + + + + + + + 320 + 176 + Y + + + + Generate rows + RowGenerator + + Y + + 1 + + none + + + + + 1 + N + 5000 + now + FiveSecondsAgo + + + + + + + + + + 112 + 176 + Y + + + + + + + N + + diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 63d68c380bf0..db2f30166a7d 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. 
@@ -61,22 +61,6 @@ - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - unit-test-environment @@ -265,22 +249,39 @@ - - addon-analytics - - - analytics.on - - - - - it.4science.dspace - addon-analytics-api - ${addon-analytics.version} - jar - - - + + addon-analytics + + + analytics.on + + + + + it.4science.dspace + addon-analytics-api + ${addon-analytics.version} + jar + + + + + + addon-dataquality + + + dq.on + + + + + it.4science.dspace + addon-dataquality + ${addon-dataquality.version} + jar + + + diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index 6c2fd62d85e6..5a4426dddfd0 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 ../../pom.xml @@ -64,5 +64,16 @@ server + + dspace-server-webapp-boot + + + server-boot/pom.xml + + + + server-boot + + diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index eb5975f6323f..8b80b4021668 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. @@ -90,24 +90,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - diff --git a/dspace/modules/server-boot/pom.xml b/dspace/modules/server-boot/pom.xml new file mode 100644 index 000000000000..90ceff73554a --- /dev/null +++ b/dspace/modules/server-boot/pom.xml @@ -0,0 +1,123 @@ + + 4.0.0 + org.dspace + server-boot + DSpace Server Webapp:: Executable JAR + + + + modules + org.dspace + cris-2023.02.02 + .. + + + + + ${basedir}/../../.. 
+ + + + + + org.dspace.modules + additions + + + org.dspace + dspace-server-webapp + + + org.apache.solr + solr-solrj + + + + + org.dspace + dspace-api + test-jar + test + + + org.dspace + dspace-server-webapp + test-jar + test + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.security + spring-security-test + ${spring-security.version} + test + + + com.jayway.jsonpath + json-path-assert + ${json-path.version} + test + + + junit + junit + test + + + com.h2database + h2 + test + + + org.mockito + mockito-inline + test + + + + + org.apache.solr + solr-core + ${solr.client.version} + test + + + + org.apache.commons + commons-text + + + + + org.apache.lucene + lucene-analyzers-icu + test + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + + + repackage + + + + + + + + diff --git a/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java new file mode 100644 index 000000000000..5efa79a02aca --- /dev/null +++ b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app; + +import org.dspace.app.rest.WebApplication; +import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; +import org.dspace.app.rest.utils.DSpaceKernelInitializer; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; + +/** + * Define the Spring Boot Application settings itself to be runned using an + * embedded application server. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@SuppressWarnings({ "checkstyle:hideutilityclassconstructor" }) +@SpringBootApplication(scanBasePackageClasses = WebApplication.class) +public class ServerBootApplication { + + public static void main(String[] args) { + new SpringApplicationBuilder(ServerBootApplication.class) + .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()) + .run(args); + } + +} diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 08ad7961e48c..0d26cdb5fa11 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -3,21 +3,16 @@ org.dspace.modules server war - DSpace Server Webapp:: Local Customizations - Overlay customizations. -This is probably a temporary solution to the build problems. We like to investigate about -the possibility to remove the overlays enable a more flexible extension mechanism. -The use of web-fragment and spring mvc technology allow us to add request handlers -just adding new jar in the classloader - + DSpace Server Webapp:: Tomcat deployable WAR modules org.dspace - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 .. + cris-2023.02.02-SNAPSHOT ${basedir}/../../.. 
@@ -73,6 +68,26 @@ just adding new jar in the classloader + + org.apache.maven.plugins + maven-dependency-plugin + + + unpack + prepare-package + + unpack-dependencies + + + runtime + org.dspace + dspace-server-webapp + **/static/**,**/*.properties + ${project.build.directory}/additions + + + + - + - + search.uniqueid diff --git a/dspace/src/main/docker-compose/README.md b/dspace/src/main/docker-compose/README.md index a83a466bdbba..35a6e6055433 100644 --- a/dspace/src/main/docker-compose/README.md +++ b/dspace/src/main/docker-compose/README.md @@ -1,4 +1,4 @@ -# Docker Compose Resources +# Docker Compose files for DSpace Backend *** :warning: **THESE IMAGES ARE NOT PRODUCTION READY** The below Docker Compose images/resources were built for development/testing only. Therefore, they may not be fully secured or up-to-date, and should not be used in production. @@ -6,27 +6,51 @@ If you wish to run DSpace on Docker in production, we recommend building your own Docker images. You are welcome to borrow ideas/concepts from the below images in doing so. But, the below images should not be used "as is" in any production scenario. *** -## root directory Resources + +## Overview +The scripts in this directory can be used to start the DSpace REST API (backend) in Docker. +Optionally, the DSpace User Interface (frontend) may also be started in Docker. + +For additional options/settings in starting the User Interface (frontend) in Docker, see the Docker Compose +documentation for the frontend: https://github.com/DSpace/dspace-angular/blob/main/docker/README.md + +## Primary Docker Compose Scripts (in root directory) +The root directory of this project contains the primary Dockerfiles & Docker Compose scripts +which are used to start the backend. 
+ - docker-compose.yml - - Docker compose file to orchestrate DSpace 7 REST components -- docker-compose-cli - - Docker compose file to run DSpace CLI tasks within a running DSpace instance in Docker + - Docker compose file to orchestrate DSpace REST API (backend) components. + - Uses the `Dockerfile` in the same directory. +- docker-compose-cli.yml + - Docker compose file to run DSpace CLI (Command Line Interface) tasks within a running DSpace instance in Docker. See instructions below. + - Uses the `Dockerfile.cli` in the same directory. -## dspace/src/main/docker-compose resources +Documentation for all Dockerfiles used by these compose scripts can be found in the ["docker" folder README](../docker/README.md) + +## Additional Docker Compose tools (in ./dspace/src/main/docker-compose) - cli.assetstore.yml - Docker compose file that will download and install a default assetstore. + - The default assetstore is the configurable entities test dataset. Useful for [testing/demos of Entities](#Ingest Option 2 Ingest Entities Test Data). - cli.ingest.yml - - Docker compose file that will run an AIP ingest into DSpace 7. + - Docker compose file that will run an AIP ingest into DSpace 7. Useful for testing/demos with basic Items. - db.entities.yml - - Docker compose file that pre-populate a database instance using a SQL dump. The default dataset is the configurable entities test dataset. -- local.cfg - - Sets the environment used across containers run with docker-compose + - Docker compose file that pre-populate a database instance using a downloaded SQL dump. + - The default dataset is the configurable entities test dataset. Useful for [testing/demos of Entities](#Ingest Option 2 Ingest Entities Test Data). 
+- db.restore.yml + - Docker compose file that pre-populate a database instance using a *local* SQL dump (hardcoded to `./pgdump.sql`) + - Useful for restoring data from a local backup, or [Upgrading PostgreSQL in Docker](#Upgrading PostgreSQL in Docker) - docker-compose-angular.yml - - Docker compose file that will start a published DSpace angular container that interacts with the branch. + - Docker compose file that will start a published DSpace User Interface container that interacts with the branch. - docker-compose-shibboleth.yml - Docker compose file that will start a *test/demo* Shibboleth SP container (in Apache) that proxies requests to the DSpace container - ONLY useful for testing/development. NOT production ready. +- docker-compose-iiif.yml + - Docker compose file that will start a *test/demo* Cantaloupe image server container required for enabling IIIF support. + - ONLY useful for testing/development. NOT production ready. + +Documentation for all Dockerfiles used by these compose scripts can be found in the ["docker" folder README](../docker/README.md) + ## To refresh / pull DSpace images from Dockerhub ``` @@ -55,6 +79,12 @@ docker-compose -p d7 up -d docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/docker-compose-angular.yml up -d ``` +## Run DSpace REST and DSpace Angular from local branches + +*Allows you to run the backend from the "DSpace/DSpace" codebase while also running the frontend from the "DSpace/dspace-angular" codebase.* + +See documentation in [DSpace User Interface Docker instructions](https://github.com/DSpace/dspace-angular/blob/main/docker/README.md#run-dspace-rest-and-dspace-angular-from-local-branches). 
+ ## Run DSpace 7 REST with a IIIF Image Server from your branch *Only useful for testing IIIF support in a development environment* @@ -67,7 +97,6 @@ docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/doc ``` ## Run DSpace 7 REST and Shibboleth SP (in Apache) from your branch - *Only useful for testing Shibboleth in a development environment* This Shibboleth container uses https://samltest.id/ as an IdP (see `../docker/dspace-shibboleth/`). @@ -143,21 +172,11 @@ The remainder of these instructions assume you are using ngrok (though other pro DSPACE_HOSTNAME=[subdomain].ngrok.io docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/docker-compose-angular.yml -f dspace/src/main/docker-compose/docker-compose-shibboleth.yml up -d ``` -## Run DSpace 7 REST and Angular from local branches +## Sample Test Data -_The system will be started in 2 steps. Each step shares the same docker network._ +### Ingesting test content from AIP files -From DSpace/DSpace -``` -docker-compose -p d7 up -d -``` - -From DSpace/DSpace-angular (build as needed) -``` -docker-compose -p d7 -f docker/docker-compose.yml up -d -``` - -## Ingest Option 1: Ingesting test content from AIP files into a running DSpace 7 instance +*Allows you to ingest a set of AIPs into your DSpace instance for testing/demo purposes.* These AIPs represent basic Communities, Collections and Items. 
Prerequisites - Start DSpace 7 using one of the options listed above @@ -173,8 +192,14 @@ Download a Zip file of AIP content and ingest test data docker-compose -p d7 -f docker-compose-cli.yml -f dspace/src/main/docker-compose/cli.ingest.yml run --rm dspace-cli ``` -## Ingest Option 2: Ingest Entities Test Data -_Remove your d7 volumes if you already ingested content into your docker volumes_ +### Ingest Entities Test Data + +*Allows you to load Configurable Entities test data for testing/demo purposes.* + +Prerequisites +- Start DSpace 7 using one of the options listed above +- Build the DSpace CLI image if needed. See the instructions above. +- _Remove your d7 volumes if you already ingested content into your docker volumes_ Start DSpace REST with a postgres database dump downloaded from the internet. ``` @@ -212,3 +237,85 @@ Similarly, you can see the value of any DSpace configuration (in local.cfg or ds # Output the value of `dspace.ui.url` from running Docker instance docker-compose -p d7 -f docker-compose-cli.yml run --rm dspace-cli dsprop -p dspace.ui.url ``` + +NOTE: It is also possible to run CLI scripts directly on the "dspace" container (where the backend runs) +This can be useful if you want to pass environment variables which override DSpace configs. +``` +# Run the "./dspace database clean" command from the "dspace" container +# Before doing so, it sets "db.cleanDisabled=false". +# WARNING: This will delete all your data. It's just an example of how to do so. +docker-compose -p d7 exec -e "db__P__cleanDisabled=false" dspace /dspace/bin/dspace database clean +``` + +## Upgrading PostgreSQL in Docker + +Occasionally, we update our `dspace-postgres-*` images to use a new version of PostgreSQL. +Simply using the new image will likely throw errors as the pgdata (postgres data) directory is incompatible +with the new version of PostgreSQL. 
These errors look like: +``` +FATAL: database files are incompatible with server +DETAIL: The data directory was initialized by PostgreSQL version 11, which is not compatible with this version 13.10 +``` + +Here's how to fix those issues by migrating your old Postgres data to the new version of Postgres + +1. First, you must start up the older PostgreSQL image (to dump your existing data to a `*.sql` file) + ``` + # This command assumes you are using the process described above to start all your containers + docker-compose -p d7 up -d + ``` + * If you've already accidentally updated to the new PostgreSQL image, you have a few options: + * Pull down an older version of the image from Dockerhub (using a tag) + * Or, temporarily rebuild your local image with the old version of Postgres. For example: + ``` + # This command will rebuild using PostgreSQL v11 & tag it locally as "dspace-7_x" + docker build --build-arg POSTGRES_VERSION=11 -t dspace/dspace-postgres-pgcrypto:dspace-7_x ./dspace/src/main/docker/dspace-postgres-pgcrypto/ + # Then restart container with that image + docker-compose -p d7 up -d + ``` +2. Dump your entire "dspace" database out of the old "dspacedb" container to a local file named `pgdump.sql` + ``` + # NOTE: WE HIGHLY RECOMMEND LOGGING INTO THE CONTAINER and doing the pg_dump within the container. + # If you attempt to run pg_dump from your local machine via docker "exec" (or similar), sometimes + # UTF-8 characters can be corrupted in the export file. This may result in data loss. + + # First login to the "dspacedb" container + docker exec -it dspacedb /bin/bash + + # Dump the "dspace" database to a file named "/tmp/pgdump.sql" within the container + pg_dump -U dspace dspace > /tmp/pgdump.sql + + # Exit the container + exit + + # Download (copy) that /tmp/pgdump.sql backup file from container to your local machine + docker cp dspacedb:/tmp/pgdump.sql . + ``` +3. Now, stop all existing containers. 
This shuts down the old version of PostgreSQL + ``` + # This command assumes you are using the process described above to start/stop all your containers + docker-compose -p d7 down + ``` +4. Delete the `pgdata` volume. WARNING: This deletes all your old PostgreSQL data. Make sure you have that `pgdump.sql` file FIRST! + ``` + # Assumes you are using `-p d7` which prefixes all volumes with `d7_` + docker volume rm d7_pgdata + ``` +5. Now, pull down the latest PostgreSQL image with the NEW version of PostgreSQL. + ``` + docker-compose -f docker-compose.yml -f docker-compose-cli.yml pull + ``` +6. Start everything up using our `db.restore.yml` script. This script will recreate the database +using the local `./pgdump.sql` file. IMPORTANT: If you renamed that "pgdump.sql" file or stored it elsewhere, +then you MUST change the name/directory in the `db.restore.yml` script. + ``` + # Restore database from "./pgdump.sql" (this path is hardcoded in db.restore.yml) + docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/db.restore.yml up -d + ``` +7. Finally, reindex all database contents into Solr (just to be sure Solr indexes are current). 
+ ``` + # Run "./dspace index-discovery -b" using our CLI image + docker-compose -p d7 -f docker-compose-cli.yml run --rm dspace-cli index-discovery -b + ``` +At this point in time, all your old database data should be migrated to the new Postgres +and running at http://localhost:8080/server/ \ No newline at end of file diff --git a/dspace/src/main/docker-compose/db.entities.yml b/dspace/src/main/docker-compose/db.entities.yml index 8d86f7bb8359..32c54a5d0bd1 100644 --- a/dspace/src/main/docker-compose/db.entities.yml +++ b/dspace/src/main/docker-compose/db.entities.yml @@ -10,7 +10,7 @@ version: "3.7" services: dspacedb: - image: dspace/dspace-postgres-pgcrypto:loadsql + image: dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql environment: # This SQL is available from https://github.com/DSpace-Labs/AIP-Files/releases/tag/demo-entities-data - LOADSQL=https://github.com/DSpace-Labs/AIP-Files/releases/download/demo-entities-data/dspace7-entities-data.sql diff --git a/dspace/src/main/docker-compose/db.restore.yml b/dspace/src/main/docker-compose/db.restore.yml new file mode 100644 index 000000000000..fc2f30b9d8e0 --- /dev/null +++ b/dspace/src/main/docker-compose/db.restore.yml @@ -0,0 +1,26 @@ +# +# The contents of this file are subject to the license and copyright +# detailed in the LICENSE and NOTICE files at the root of the source +# tree and available online at +# +# http://www.dspace.org/license/ +# + +version: "3.7" + +# +# Overrides the default "dspacedb" container behavior to load a local SQL file into PostgreSQL. +# +# This can be used to restore a "dspacedb" container from a pg_dump, or during upgrade to a new version of PostgreSQL. 
+services: + dspacedb: + image: dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql + environment: + # Location where the dump SQL file will be available on the running container + - LOCALSQL=/tmp/pgdump.sql + volumes: + # Volume which shares a local SQL file at "./pgdump.sql" to the running container + # IF YOUR LOCAL FILE HAS A DIFFERENT NAME (or is in a different location), then change the "./pgdump.sql" + # portion of this line. + - ./pgdump.sql:/tmp/pgdump.sql + diff --git a/dspace/src/main/docker/README.md b/dspace/src/main/docker/README.md index 6c9da0190cd2..ac1b4cb9236b 100644 --- a/dspace/src/main/docker/README.md +++ b/dspace/src/main/docker/README.md @@ -1,4 +1,4 @@ -# Docker images supporting DSpace +# Docker images supporting DSpace Backend *** :warning: **THESE IMAGES ARE NOT PRODUCTION READY** The below Docker Compose images/resources were built for development/testing only. Therefore, they may not be fully secured or up-to-date, and should not be used in production. @@ -6,9 +6,15 @@ If you wish to run DSpace on Docker in production, we recommend building your own Docker images. You are welcome to borrow ideas/concepts from the below images in doing so. But, the below images should not be used "as is" in any production scenario. *** -## Dockerfile.dependencies +## Overview +The Dockerfiles in this directory (and subdirectories) are used by our [Docker Compose scripts](../docker-compose/README.md). + +## Dockerfile.dependencies (in root folder) This Dockerfile is used to pre-cache Maven dependency downloads that will be used in subsequent DSpace docker builds. +Caching these Maven dependencies provides a speed increase to all later builds by ensuring the dependencies +are only downloaded once. + ``` docker build -t dspace/dspace-dependencies:dspace-7_x -f Dockerfile.dependencies . ``` @@ -22,12 +28,13 @@ Admins to our DockerHub repo can manually publish with the following command. 
docker push dspace/dspace-dependencies:dspace-7_x ``` -## Dockerfile.test +## Dockerfile.test (in root folder) -This Dockerfile builds a DSpace 7 Tomcat image (for testing/development). -This image deploys two DSpace webapps: +This Dockerfile builds a DSpace 7 backend image (for testing/development). +This image deploys two DSpace webapps to Tomcat running in Docker: 1. The DSpace 7 REST API (at `http://localhost:8080/server`) -2. The legacy (v6) REST API (at `http://localhost:8080//rest`), deployed without requiring HTTPS access. +2. The legacy (v6) REST API (at `http://localhost:8080/rest`), deployed without requiring HTTPS access. +This image also sets up debugging in Tomcat for development. ``` docker build -t dspace/dspace:dspace-7_x-test -f Dockerfile.test . @@ -42,12 +49,12 @@ Admins to our DockerHub repo can manually publish with the following command. docker push dspace/dspace:dspace-7_x-test ``` -## Dockerfile +## Dockerfile (in root folder) -This Dockerfile builds a DSpace 7 tomcat image. -This image deploys two DSpace webapps: +This Dockerfile builds a DSpace 7 backend image. +This image deploys one DSpace webapp to Tomcat running in Docker: 1. The DSpace 7 REST API (at `http://localhost:8080/server`) -2. The legacy (v6) REST API (at `http://localhost:8080//rest`), deployed *requiring* HTTPS access. + ``` docker build -t dspace/dspace:dspace-7_x -f Dockerfile . ``` @@ -61,9 +68,9 @@ Admins to our DockerHub repo can publish with the following command. docker push dspace/dspace:dspace-7_x ``` -## Dockefile.cli +## Dockerfile.cli (in root folder) -This Dockerfile builds a DSpace 7 CLI image, which can be used to run commandline tools via Docker. +This Dockerfile builds a DSpace 7 CLI (command line interface) image, which can be used to run DSpace's commandline tools via Docker. ``` docker build -t dspace/dspace-cli:dspace-7_x -f Dockerfile.cli . ``` @@ -77,46 +84,60 @@ Admins to our DockerHub repo can publish with the following command. 
docker push dspace/dspace-cli:dspace-7_x ``` -## dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +## ./dspace-postgres-pgcrypto/Dockerfile This is a PostgreSQL Docker image containing the `pgcrypto` extension required by DSpace 6+. +This image is built *automatically* after each commit is made to the `main` branch. + +How to build manually: ``` cd dspace/src/main/docker/dspace-postgres-pgcrypto -docker build -t dspace/dspace-postgres-pgcrypto . +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x . ``` -**This image is built manually.** It should be rebuilt as needed. +It is also possible to change the version of PostgreSQL or the PostgreSQL user's password during the build: +``` +cd dspace/src/main/docker/dspace-postgres-pgcrypto +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x --build-arg POSTGRES_VERSION=11 --build-arg POSTGRES_PASSWORD=mypass . +``` A copy of this file exists in the DSpace 6 branch. A specialized version of this file exists for DSpace 4 in DSpace-Docker-Images. -Admins to our DockerHub repo can publish with the following command. +Admins to our DockerHub repo can (manually) publish with the following command. ``` -docker push dspace/dspace-postgres-pgcrypto +docker push dspace/dspace-postgres-pgcrypto:dspace-7_x ``` -## dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +## ./dspace-postgres-pgcrypto-curl/Dockerfile This is a PostgreSQL Docker image containing the `pgcrypto` extension required by DSpace 6+. This image also contains `curl`. The image is pre-configured to load a Postgres database dump on initialization. + +This image is built *automatically* after each commit is made to the `main` branch. + +How to build manually: ``` cd dspace/src/main/docker/dspace-postgres-pgcrypto-curl -docker build -t dspace/dspace-postgres-pgcrypto:loadsql . +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql . ``` -**This image is built manually.** It should be rebuilt as needed. 
+Similar to `dspace-postgres-pgcrypto` above, you can also modify the version of PostgreSQL or the PostgreSQL user's password. +See examples above. A copy of this file exists in the DSpace 6 branch. -Admins to our DockerHub repo can publish with the following command. +Admins to our DockerHub repo can (manually) publish with the following command. ``` -docker push dspace/dspace-postgres-pgcrypto:loadsql +docker push dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql ``` -## dspace/src/main/docker/dspace-shibboleth/Dockerfile +## ./dspace-shibboleth/Dockerfile This is a test / demo image which provides an Apache HTTPD proxy (in front of Tomcat) -with mod_shib & Shibboleth installed. It is primarily for usage for -testing DSpace's Shibboleth integration. It uses https://samltest.id/ as the Shibboleth IDP +with `mod_shib` & Shibboleth installed based on the +[DSpace Shibboleth configuration instructions](https://wiki.lyrasis.org/display/DSDOC7x/Authentication+Plugins#AuthenticationPlugins-ShibbolethAuthentication). +It is primarily for usage for testing DSpace's Shibboleth integration. +It uses https://samltest.id/ as the Shibboleth IDP **This image is built manually.** It should be rebuilt as needed. @@ -130,10 +151,28 @@ docker run -i -t -d -p 80:80 -p 443:443 dspace/dspace-shibboleth This image can also be rebuilt using the `../docker-compose/docker-compose-shibboleth.yml` script. +## ./dspace-solr/Dockerfile + +This Dockerfile builds a Solr image with DSpace Solr configsets included. It +can be pulled / built following the [docker compose resources](../docker-compose/README.md) +documentation. Or, to just build and/or run Solr: + +```bash +docker-compose build dspacesolr +docker-compose -p d7 up -d dspacesolr +``` + +If you're making iterative changes to the DSpace Solr configsets you'll need to rebuild / +restart the `dspacesolr` container for the changes to be deployed. 
From DSpace root: + +```bash +docker-compose -p d7 up --detach --build dspacesolr +``` -## test/ folder +## ./test/ folder These resources are bundled into the `dspace/dspace:dspace-*-test` image at build time. +See the `Dockerfile.test` section above for more information about the test image. ## Debugging Docker builds diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile index 0e85dd33ce59..b2131a740262 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile @@ -6,14 +6,21 @@ # http://www.dspace.org/license/ # -# This will be deployed as dspace/dspace-postgres-pgcrpyto:loadsql -FROM postgres:11 +# To build for example use: +# docker build --build-arg POSTGRES_VERSION=13 --build-arg POSTGRES_PASSWORD=mypass ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ +# This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION-loadsql + +ARG POSTGRES_VERSION=13 +ARG POSTGRES_PASSWORD=dspace + +FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD dspace +ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} -# Load a SQL dump. Set LOADSQL to a URL for the sql dump file. -RUN apt-get update && apt-get install -y curl +# Install curl which is necessary to load SQL file +RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/* +# Load a SQL dump. Set LOADSQL to a URL for the sql dump file. 
COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/ diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh index 054d3dede5dc..3f8e95e1044f 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh @@ -11,15 +11,33 @@ set -e CHECKFILE=/pgdata/ingest.hasrun.flag +# If $LOADSQL environment variable set, use 'curl' to download that SQL and run it in PostgreSQL +# This can be used to initialize a database based on test data available on the web. if [ ! -f $CHECKFILE -a ! -z ${LOADSQL} ] then - curl ${LOADSQL} -L -s --output /tmp/dspace.sql - psql -U $POSTGRES_USER < /tmp/dspace.sql + # Download SQL file to /tmp/dspace-db-init.sql + curl ${LOADSQL} -L -s --output /tmp/dspace-db-init.sql + # Load into PostgreSQL + psql -U $POSTGRES_USER < /tmp/dspace-db-init.sql + # Remove downloaded file + rm /tmp/dspace-db-init.sql touch $CHECKFILE exit fi +# If $LOCALSQL environment variable set, then simply run it in PostgreSQL +# This can be used to restore data from a pg_dump or similar. +if [ ! -f $CHECKFILE -a ! 
-z ${LOCALSQL} ] +then + # Load into PostgreSQL + psql -U $POSTGRES_USER < ${LOCALSQL} + + touch $CHECKFILE + exit +fi + +# Then, setup pgcrypto on this database psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL -- Create a new schema in this database named "extensions" (or whatever you want to name it) CREATE SCHEMA extensions; diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile index 84b7569a2b2c..7dde1a6bfd1c 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile @@ -6,13 +6,18 @@ # http://www.dspace.org/license/ # -# This will be deployed as dspace/dspace-postgres-pgcrpyto:latest -FROM postgres:11 +# To build for example use: +# docker build --build-arg POSTGRES_VERSION=13 --build-arg POSTGRES_PASSWORD=mypass ./dspace/src/main/docker/dspace-postgres-pgcrypto/ +# This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION + +ARG POSTGRES_VERSION=13 +ARG POSTGRES_PASSWORD=dspace + +FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD dspace - -RUN apt-get update +ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} +# Copy over script which will initialize database and install pgcrypto extension COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/ diff --git a/dspace/src/main/docker/dspace-solr/Dockerfile b/dspace/src/main/docker/dspace-solr/Dockerfile new file mode 100644 index 000000000000..9fe9adf9440f --- /dev/null +++ b/dspace/src/main/docker/dspace-solr/Dockerfile @@ -0,0 +1,36 @@ +# +# The contents of this file are subject to the license and copyright +# detailed in the LICENSE and NOTICE files at the root of the source +# tree and available online at +# +# http://www.dspace.org/license/ +# + +# To build use root as context for (easier) access to solr cfgs +# docker build --build-arg SOLR_VERSION=8.11 -f 
./dspace/src/main/docker/dspace-solr/Dockerfile . +# This will be published as dspace/dspace-solr:$DSPACE_VERSION + +ARG SOLR_VERSION=8.11 + +FROM solr:${SOLR_VERSION}-slim + +ENV AUTHORITY_CONFIGSET_PATH=/opt/solr/server/solr/configsets/authority/conf \ + OAI_CONFIGSET_PATH=/opt/solr/server/solr/configsets/oai/conf \ + SEARCH_CONFIGSET_PATH=/opt/solr/server/solr/configsets/search/conf \ + STATISTICS_CONFIGSET_PATH=/opt/solr/server/solr/configsets/statistics/conf + +USER root + +RUN mkdir -p $AUTHORITY_CONFIGSET_PATH && \ + mkdir -p $OAI_CONFIGSET_PATH && \ + mkdir -p $SEARCH_CONFIGSET_PATH && \ + mkdir -p $STATISTICS_CONFIGSET_PATH + +COPY dspace/solr/authority/conf/* $AUTHORITY_CONFIGSET_PATH/ +COPY dspace/solr/oai/conf/* $OAI_CONFIGSET_PATH/ +COPY dspace/solr/search/conf/* $SEARCH_CONFIGSET_PATH/ +COPY dspace/solr/statistics/conf/* $STATISTICS_CONFIGSET_PATH/ + +RUN chown -R solr:solr /opt/solr/server/solr/configsets + +USER solr diff --git a/pom.xml b/pom.xml index b25632a959ff..c7b81d3eeb35 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 DSpace Parent Project DSpace-CRIS is an open source extension of DSpace (http://www.dspace.org) providing out of box support for the CRIS / RIMS and moder Institution Repository use cases with advanced features and optimized configurations @@ -19,13 +19,13 @@ 11 - 5.3.20 - 2.6.8 - 5.6.5 - 5.6.5.Final - 6.0.23.Final - 42.4.3 - 8.11.1 + 5.3.27 + 2.7.12 + 5.7.8 + 5.6.15.Final + 6.2.5.Final + 42.6.0 + 8.11.2 3.4.0 2.10.0 @@ -37,12 +37,12 @@ 2.3.1 1.1.0 - 9.4.48.v20220622 - 2.17.1 - 2.0.27 - 1.18.0 - 1.7.25 - 2.3.0 + 9.4.53.v20231009 + 2.20.0 + 2.0.28 + 1.19.0 + 1.7.36 + 2.5.0 1.70 @@ -65,6 +65,7 @@ [CRIS-7.1-SNAPSHOT,CRIS-8.0-SNAPSHOT) [CRIS-7.0-SNAPSHOT,CRIS-8.0-SNAPSHOT) [CRIS-7.0-SNAPSHOT,CRIS-8.0-SNAPSHOT) + [CRIS-2023.02-SNAPSHOT,CRIS-2023.03-SNAPSHOT) UTF-8 @@ -938,7 +939,27 @@ - + + + + addon-dataquality + + false + + + + + it.4science.dspace + 
addon-dataquality + ${addon-dataquality.version} + jar + + + + + @@ -958,14 +979,14 @@ org.dspace dspace-rest - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 jar classes org.dspace dspace-rest - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 war @@ -1116,70 +1137,62 @@ org.dspace dspace-api - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-api test-jar - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 test org.dspace.modules additions - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-sword - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-swordv2 - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-oai - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-services - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-server-webapp test-jar - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 test org.dspace dspace-rdf - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-iiif - cris-2023.01.01-SNAPSHOT + cris-2023.02.02 org.dspace dspace-server-webapp - cris-2023.01.01-SNAPSHOT - jar - classes - - - org.dspace - dspace-server-webapp - cris-2023.01.01-SNAPSHOT - war + cris-2023.02.02 @@ -1576,7 +1589,7 @@ commons-fileupload commons-fileupload - 1.3.3 + 1.5 commons-io @@ -1709,11 +1722,6 @@ icu4j 62.1 - - com.oracle - ojdbc6 - 11.2.0.4.0 - org.dspace @@ -1791,7 +1799,7 @@ com.h2database h2 - 2.1.210 + 2.2.220 test @@ -1869,7 +1877,7 @@ com.google.guava guava - 31.0.1-jre + 32.0.0-jre @@ -2029,7 +2037,7 @@ scm:git:git@github.com:4Science/DSpace.git scm:git:git@github.com:4Science/DSpace.git git@github.com:4Science/DSpace.git - dspace-cris-2022.02.00 + dspace-cris-2023.02.02